From 5c6190603c0429e7877f3056ae37f2182df30a52 Mon Sep 17 00:00:00 2001 From: Eric Schmidt Date: Tue, 15 Dec 2020 14:27:17 -0800 Subject: [PATCH 1/5] fix: blacken on library, test files --- docs/conf.py | 5 +- .../types/image_classification.py | 4 +- .../types/image_object_detection.py | 4 +- .../types/image_segmentation.py | 4 +- .../types/text_classification.py | 4 +- .../instance_v1beta1/types/text_extraction.py | 4 +- .../instance_v1beta1/types/text_sentiment.py | 4 +- .../types/video_action_recognition.py | 4 +- .../types/video_classification.py | 4 +- .../types/video_object_tracking.py | 4 +- .../types/image_classification.py | 4 +- .../types/image_object_detection.py | 4 +- .../types/image_segmentation.py | 4 +- .../types/video_action_recognition.py | 4 +- .../types/video_classification.py | 4 +- .../types/video_object_tracking.py | 4 +- .../types/classification.py | 4 +- .../types/image_object_detection.py | 10 +- .../types/image_segmentation.py | 4 +- .../types/tabular_classification.py | 4 +- .../types/tabular_regression.py | 4 +- .../types/text_extraction.py | 4 +- .../types/text_sentiment.py | 10 +- .../types/time_series_forecasting.py | 4 +- .../types/video_action_recognition.py | 20 +- .../types/video_classification.py | 20 +- .../types/video_object_tracking.py | 64 +- .../types/automl_forecasting.py | 20 +- .../types/automl_image_classification.py | 18 +- .../types/automl_image_object_detection.py | 18 +- .../types/automl_image_segmentation.py | 18 +- .../definition_v1beta1/types/automl_tables.py | 22 +- .../types/automl_text_classification.py | 9 +- .../types/automl_text_extraction.py | 11 +- .../types/automl_text_sentiment.py | 11 +- .../types/automl_video_action_recognition.py | 15 +- .../types/automl_video_classification.py | 15 +- .../types/automl_video_object_tracking.py | 15 +- .../export_evaluated_data_items_config.py | 4 +- .../services/dataset_service/async_client.py | 85 +- .../services/dataset_service/client.py | 157 +- .../dataset_service/transports/grpc.py | 3 +- .../services/endpoint_service/async_client.py | 54 +- .../services/endpoint_service/client.py | 115 +- .../endpoint_service/transports/grpc.py | 3 +- .../services/job_service/async_client.py | 152 +- .../services/job_service/client.py | 239 ++- .../services/job_service/transports/grpc.py | 3 +- .../migration_service/async_client.py | 19 +- .../services/migration_service/client.py | 126 +- .../migration_service/transports/grpc.py | 3 +- .../services/model_service/async_client.py | 85 +- .../services/model_service/client.py | 170 ++- .../services/model_service/transports/grpc.py | 3 +- .../services/pipeline_service/async_client.py | 38 +- .../services/pipeline_service/client.py | 107 +- .../pipeline_service/transports/grpc.py | 3 +- .../prediction_service/async_client.py | 14 +- .../services/prediction_service/client.py | 63 +- .../prediction_service/transports/grpc.py | 3 +- .../specialist_pool_service/async_client.py | 40 +- .../specialist_pool_service/client.py | 89 +- .../transports/grpc.py | 3 +- .../types/accelerator_type.py | 5 +- .../aiplatform_v1beta1/types/annotation.py | 27 +- .../types/annotation_spec.py | 19 +- .../types/batch_prediction_job.py | 100 +- .../types/completion_stats.py | 5 +- .../aiplatform_v1beta1/types/custom_job.py | 80 +- .../aiplatform_v1beta1/types/data_item.py | 23 +- .../types/data_labeling_job.py | 58 +- .../cloud/aiplatform_v1beta1/types/dataset.py | 34 +- .../types/dataset_service.py | 80 +- .../types/deployed_model_ref.py | 5 +- 
.../aiplatform_v1beta1/types/endpoint.py | 32 +- .../types/endpoint_service.py | 48 +- .../cloud/aiplatform_v1beta1/types/env_var.py | 5 +- .../aiplatform_v1beta1/types/explanation.py | 54 +- .../types/explanation_metadata.py | 32 +- .../types/hyperparameter_tuning_job.py | 57 +- .../aiplatform_v1beta1/types/job_service.py | 50 +- .../aiplatform_v1beta1/types/job_state.py | 5 +- .../types/machine_resources.py | 16 +- .../types/manual_batch_tuning_parameters.py | 4 +- .../types/migratable_resource.py | 33 +- .../types/migration_service.py | 30 +- .../cloud/aiplatform_v1beta1/types/model.py | 69 +- .../types/model_evaluation.py | 21 +- .../types/model_evaluation_slice.py | 25 +- .../aiplatform_v1beta1/types/model_service.py | 78 +- .../aiplatform_v1beta1/types/operation.py | 25 +- .../types/pipeline_service.py | 14 +- .../types/pipeline_state.py | 5 +- .../types/prediction_service.py | 40 +- .../types/specialist_pool.py | 5 +- .../types/specialist_pool_service.py | 32 +- .../cloud/aiplatform_v1beta1/types/study.py | 88 +- .../types/training_pipeline.py | 90 +- .../types/user_action_reference.py | 5 +- .../test_dataset_service.py | 824 +++++++--- .../test_endpoint_service.py | 477 ++++-- .../aiplatform_v1beta1/test_job_service.py | 1321 ++++++++++++----- .../test_migration_service.py | 210 ++- .../aiplatform_v1beta1/test_model_service.py | 788 +++++++--- .../test_pipeline_service.py | 381 +++-- .../test_prediction_service.py | 109 +- .../test_specialist_pool_service.py | 217 ++- 107 files changed, 5843 insertions(+), 1749 deletions(-) diff --git a/docs/conf.py b/docs/conf.py index 98e68be241..a6e4da0270 100644 --- a/docs/conf.py +++ b/docs/conf.py @@ -347,7 +347,10 @@ intersphinx_mapping = { "python": ("https://python.readthedocs.org/en/latest/", None), "google-auth": ("https://googleapis.dev/python/google-auth/latest/", None), - "google.api_core": ("https://googleapis.dev/python/google-api-core/latest/", None,), + "google.api_core": ( + "https://googleapis.dev/python/google-api-core/latest/", + None, + ), "grpc": ("https://grpc.github.io/grpc/python/", None), "proto-plus": ("https://proto-plus-python.readthedocs.io/en/latest/", None), } diff --git a/google/cloud/aiplatform/v1beta1/schema/predict/instance_v1beta1/types/image_classification.py b/google/cloud/aiplatform/v1beta1/schema/predict/instance_v1beta1/types/image_classification.py index 84b1ef0bbe..c484150e69 100644 --- a/google/cloud/aiplatform/v1beta1/schema/predict/instance_v1beta1/types/image_classification.py +++ b/google/cloud/aiplatform/v1beta1/schema/predict/instance_v1beta1/types/image_classification.py @@ -20,7 +20,9 @@ __protobuf__ = proto.module( package="google.cloud.aiplatform.v1beta1.schema.predict.instance", - manifest={"ImageClassificationPredictionInstance",}, + manifest={ + "ImageClassificationPredictionInstance", + }, ) diff --git a/google/cloud/aiplatform/v1beta1/schema/predict/instance_v1beta1/types/image_object_detection.py b/google/cloud/aiplatform/v1beta1/schema/predict/instance_v1beta1/types/image_object_detection.py index 79c3efc2c6..8455fa581c 100644 --- a/google/cloud/aiplatform/v1beta1/schema/predict/instance_v1beta1/types/image_object_detection.py +++ b/google/cloud/aiplatform/v1beta1/schema/predict/instance_v1beta1/types/image_object_detection.py @@ -20,7 +20,9 @@ __protobuf__ = proto.module( package="google.cloud.aiplatform.v1beta1.schema.predict.instance", - manifest={"ImageObjectDetectionPredictionInstance",}, + manifest={ + "ImageObjectDetectionPredictionInstance", + }, ) diff --git 
a/google/cloud/aiplatform/v1beta1/schema/predict/instance_v1beta1/types/image_segmentation.py b/google/cloud/aiplatform/v1beta1/schema/predict/instance_v1beta1/types/image_segmentation.py index 5a3232c6d2..497b67b691 100644 --- a/google/cloud/aiplatform/v1beta1/schema/predict/instance_v1beta1/types/image_segmentation.py +++ b/google/cloud/aiplatform/v1beta1/schema/predict/instance_v1beta1/types/image_segmentation.py @@ -20,7 +20,9 @@ __protobuf__ = proto.module( package="google.cloud.aiplatform.v1beta1.schema.predict.instance", - manifest={"ImageSegmentationPredictionInstance",}, + manifest={ + "ImageSegmentationPredictionInstance", + }, ) diff --git a/google/cloud/aiplatform/v1beta1/schema/predict/instance_v1beta1/types/text_classification.py b/google/cloud/aiplatform/v1beta1/schema/predict/instance_v1beta1/types/text_classification.py index a615dc7e49..4f196ac220 100644 --- a/google/cloud/aiplatform/v1beta1/schema/predict/instance_v1beta1/types/text_classification.py +++ b/google/cloud/aiplatform/v1beta1/schema/predict/instance_v1beta1/types/text_classification.py @@ -20,7 +20,9 @@ __protobuf__ = proto.module( package="google.cloud.aiplatform.v1beta1.schema.predict.instance", - manifest={"TextClassificationPredictionInstance",}, + manifest={ + "TextClassificationPredictionInstance", + }, ) diff --git a/google/cloud/aiplatform/v1beta1/schema/predict/instance_v1beta1/types/text_extraction.py b/google/cloud/aiplatform/v1beta1/schema/predict/instance_v1beta1/types/text_extraction.py index c6fecf80b7..1077f8b8d7 100644 --- a/google/cloud/aiplatform/v1beta1/schema/predict/instance_v1beta1/types/text_extraction.py +++ b/google/cloud/aiplatform/v1beta1/schema/predict/instance_v1beta1/types/text_extraction.py @@ -20,7 +20,9 @@ __protobuf__ = proto.module( package="google.cloud.aiplatform.v1beta1.schema.predict.instance", - manifest={"TextExtractionPredictionInstance",}, + manifest={ + "TextExtractionPredictionInstance", + }, ) diff --git a/google/cloud/aiplatform/v1beta1/schema/predict/instance_v1beta1/types/text_sentiment.py b/google/cloud/aiplatform/v1beta1/schema/predict/instance_v1beta1/types/text_sentiment.py index 69836d0e96..00bd62fdeb 100644 --- a/google/cloud/aiplatform/v1beta1/schema/predict/instance_v1beta1/types/text_sentiment.py +++ b/google/cloud/aiplatform/v1beta1/schema/predict/instance_v1beta1/types/text_sentiment.py @@ -20,7 +20,9 @@ __protobuf__ = proto.module( package="google.cloud.aiplatform.v1beta1.schema.predict.instance", - manifest={"TextSentimentPredictionInstance",}, + manifest={ + "TextSentimentPredictionInstance", + }, ) diff --git a/google/cloud/aiplatform/v1beta1/schema/predict/instance_v1beta1/types/video_action_recognition.py b/google/cloud/aiplatform/v1beta1/schema/predict/instance_v1beta1/types/video_action_recognition.py index 89be6318f8..0e6d5afd6e 100644 --- a/google/cloud/aiplatform/v1beta1/schema/predict/instance_v1beta1/types/video_action_recognition.py +++ b/google/cloud/aiplatform/v1beta1/schema/predict/instance_v1beta1/types/video_action_recognition.py @@ -20,7 +20,9 @@ __protobuf__ = proto.module( package="google.cloud.aiplatform.v1beta1.schema.predict.instance", - manifest={"VideoActionRecognitionPredictionInstance",}, + manifest={ + "VideoActionRecognitionPredictionInstance", + }, ) diff --git a/google/cloud/aiplatform/v1beta1/schema/predict/instance_v1beta1/types/video_classification.py b/google/cloud/aiplatform/v1beta1/schema/predict/instance_v1beta1/types/video_classification.py index 41ab3bc217..32c0dff2f7 100644 --- 
a/google/cloud/aiplatform/v1beta1/schema/predict/instance_v1beta1/types/video_classification.py +++ b/google/cloud/aiplatform/v1beta1/schema/predict/instance_v1beta1/types/video_classification.py @@ -20,7 +20,9 @@ __protobuf__ = proto.module( package="google.cloud.aiplatform.v1beta1.schema.predict.instance", - manifest={"VideoClassificationPredictionInstance",}, + manifest={ + "VideoClassificationPredictionInstance", + }, ) diff --git a/google/cloud/aiplatform/v1beta1/schema/predict/instance_v1beta1/types/video_object_tracking.py b/google/cloud/aiplatform/v1beta1/schema/predict/instance_v1beta1/types/video_object_tracking.py index 3729c14816..4c6d0714bb 100644 --- a/google/cloud/aiplatform/v1beta1/schema/predict/instance_v1beta1/types/video_object_tracking.py +++ b/google/cloud/aiplatform/v1beta1/schema/predict/instance_v1beta1/types/video_object_tracking.py @@ -20,7 +20,9 @@ __protobuf__ = proto.module( package="google.cloud.aiplatform.v1beta1.schema.predict.instance", - manifest={"VideoObjectTrackingPredictionInstance",}, + manifest={ + "VideoObjectTrackingPredictionInstance", + }, ) diff --git a/google/cloud/aiplatform/v1beta1/schema/predict/params_v1beta1/types/image_classification.py b/google/cloud/aiplatform/v1beta1/schema/predict/params_v1beta1/types/image_classification.py index 681a8c3d87..b8deb2a0c6 100644 --- a/google/cloud/aiplatform/v1beta1/schema/predict/params_v1beta1/types/image_classification.py +++ b/google/cloud/aiplatform/v1beta1/schema/predict/params_v1beta1/types/image_classification.py @@ -20,7 +20,9 @@ __protobuf__ = proto.module( package="google.cloud.aiplatform.v1beta1.schema.predict.params", - manifest={"ImageClassificationPredictionParams",}, + manifest={ + "ImageClassificationPredictionParams", + }, ) diff --git a/google/cloud/aiplatform/v1beta1/schema/predict/params_v1beta1/types/image_object_detection.py b/google/cloud/aiplatform/v1beta1/schema/predict/params_v1beta1/types/image_object_detection.py index 146dd324b7..13bf3059b9 100644 --- a/google/cloud/aiplatform/v1beta1/schema/predict/params_v1beta1/types/image_object_detection.py +++ b/google/cloud/aiplatform/v1beta1/schema/predict/params_v1beta1/types/image_object_detection.py @@ -20,7 +20,9 @@ __protobuf__ = proto.module( package="google.cloud.aiplatform.v1beta1.schema.predict.params", - manifest={"ImageObjectDetectionPredictionParams",}, + manifest={ + "ImageObjectDetectionPredictionParams", + }, ) diff --git a/google/cloud/aiplatform/v1beta1/schema/predict/params_v1beta1/types/image_segmentation.py b/google/cloud/aiplatform/v1beta1/schema/predict/params_v1beta1/types/image_segmentation.py index aa11739a61..3e24237e86 100644 --- a/google/cloud/aiplatform/v1beta1/schema/predict/params_v1beta1/types/image_segmentation.py +++ b/google/cloud/aiplatform/v1beta1/schema/predict/params_v1beta1/types/image_segmentation.py @@ -20,7 +20,9 @@ __protobuf__ = proto.module( package="google.cloud.aiplatform.v1beta1.schema.predict.params", - manifest={"ImageSegmentationPredictionParams",}, + manifest={ + "ImageSegmentationPredictionParams", + }, ) diff --git a/google/cloud/aiplatform/v1beta1/schema/predict/params_v1beta1/types/video_action_recognition.py b/google/cloud/aiplatform/v1beta1/schema/predict/params_v1beta1/types/video_action_recognition.py index c1f8f9f3bc..7d8d6e1a82 100644 --- a/google/cloud/aiplatform/v1beta1/schema/predict/params_v1beta1/types/video_action_recognition.py +++ b/google/cloud/aiplatform/v1beta1/schema/predict/params_v1beta1/types/video_action_recognition.py @@ -20,7 +20,9 @@ __protobuf__ = 
proto.module( package="google.cloud.aiplatform.v1beta1.schema.predict.params", - manifest={"VideoActionRecognitionPredictionParams",}, + manifest={ + "VideoActionRecognitionPredictionParams", + }, ) diff --git a/google/cloud/aiplatform/v1beta1/schema/predict/params_v1beta1/types/video_classification.py b/google/cloud/aiplatform/v1beta1/schema/predict/params_v1beta1/types/video_classification.py index 1b8d84a7d1..80149d426b 100644 --- a/google/cloud/aiplatform/v1beta1/schema/predict/params_v1beta1/types/video_classification.py +++ b/google/cloud/aiplatform/v1beta1/schema/predict/params_v1beta1/types/video_classification.py @@ -20,7 +20,9 @@ __protobuf__ = proto.module( package="google.cloud.aiplatform.v1beta1.schema.predict.params", - manifest={"VideoClassificationPredictionParams",}, + manifest={ + "VideoClassificationPredictionParams", + }, ) diff --git a/google/cloud/aiplatform/v1beta1/schema/predict/params_v1beta1/types/video_object_tracking.py b/google/cloud/aiplatform/v1beta1/schema/predict/params_v1beta1/types/video_object_tracking.py index 4c0b6846bc..8aa3ff8384 100644 --- a/google/cloud/aiplatform/v1beta1/schema/predict/params_v1beta1/types/video_object_tracking.py +++ b/google/cloud/aiplatform/v1beta1/schema/predict/params_v1beta1/types/video_object_tracking.py @@ -20,7 +20,9 @@ __protobuf__ = proto.module( package="google.cloud.aiplatform.v1beta1.schema.predict.params", - manifest={"VideoObjectTrackingPredictionParams",}, + manifest={ + "VideoObjectTrackingPredictionParams", + }, ) diff --git a/google/cloud/aiplatform/v1beta1/schema/predict/prediction_v1beta1/types/classification.py b/google/cloud/aiplatform/v1beta1/schema/predict/prediction_v1beta1/types/classification.py index 3bfe82f64e..850779b6b7 100644 --- a/google/cloud/aiplatform/v1beta1/schema/predict/prediction_v1beta1/types/classification.py +++ b/google/cloud/aiplatform/v1beta1/schema/predict/prediction_v1beta1/types/classification.py @@ -20,7 +20,9 @@ __protobuf__ = proto.module( package="google.cloud.aiplatform.v1beta1.schema.predict.prediction", - manifest={"ClassificationPredictionResult",}, + manifest={ + "ClassificationPredictionResult", + }, ) diff --git a/google/cloud/aiplatform/v1beta1/schema/predict/prediction_v1beta1/types/image_object_detection.py b/google/cloud/aiplatform/v1beta1/schema/predict/prediction_v1beta1/types/image_object_detection.py index 1bf5002c2a..08cd977503 100644 --- a/google/cloud/aiplatform/v1beta1/schema/predict/prediction_v1beta1/types/image_object_detection.py +++ b/google/cloud/aiplatform/v1beta1/schema/predict/prediction_v1beta1/types/image_object_detection.py @@ -23,7 +23,9 @@ __protobuf__ = proto.module( package="google.cloud.aiplatform.v1beta1.schema.predict.prediction", - manifest={"ImageObjectDetectionPredictionResult",}, + manifest={ + "ImageObjectDetectionPredictionResult", + }, ) @@ -58,7 +60,11 @@ class ImageObjectDetectionPredictionResult(proto.Message): confidences = proto.RepeatedField(proto.FLOAT, number=3) - bboxes = proto.RepeatedField(proto.MESSAGE, number=4, message=struct.ListValue,) + bboxes = proto.RepeatedField( + proto.MESSAGE, + number=4, + message=struct.ListValue, + ) __all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/google/cloud/aiplatform/v1beta1/schema/predict/prediction_v1beta1/types/image_segmentation.py b/google/cloud/aiplatform/v1beta1/schema/predict/prediction_v1beta1/types/image_segmentation.py index 195dea6f79..a92a3805a3 100644 --- a/google/cloud/aiplatform/v1beta1/schema/predict/prediction_v1beta1/types/image_segmentation.py +++ 
b/google/cloud/aiplatform/v1beta1/schema/predict/prediction_v1beta1/types/image_segmentation.py @@ -20,7 +20,9 @@ __protobuf__ = proto.module( package="google.cloud.aiplatform.v1beta1.schema.predict.prediction", - manifest={"ImageSegmentationPredictionResult",}, + manifest={ + "ImageSegmentationPredictionResult", + }, ) diff --git a/google/cloud/aiplatform/v1beta1/schema/predict/prediction_v1beta1/types/tabular_classification.py b/google/cloud/aiplatform/v1beta1/schema/predict/prediction_v1beta1/types/tabular_classification.py index 4906ad59a5..759329db4b 100644 --- a/google/cloud/aiplatform/v1beta1/schema/predict/prediction_v1beta1/types/tabular_classification.py +++ b/google/cloud/aiplatform/v1beta1/schema/predict/prediction_v1beta1/types/tabular_classification.py @@ -20,7 +20,9 @@ __protobuf__ = proto.module( package="google.cloud.aiplatform.v1beta1.schema.predict.prediction", - manifest={"TabularClassificationPredictionResult",}, + manifest={ + "TabularClassificationPredictionResult", + }, ) diff --git a/google/cloud/aiplatform/v1beta1/schema/predict/prediction_v1beta1/types/tabular_regression.py b/google/cloud/aiplatform/v1beta1/schema/predict/prediction_v1beta1/types/tabular_regression.py index 71d535c1f0..ed7851e3bd 100644 --- a/google/cloud/aiplatform/v1beta1/schema/predict/prediction_v1beta1/types/tabular_regression.py +++ b/google/cloud/aiplatform/v1beta1/schema/predict/prediction_v1beta1/types/tabular_regression.py @@ -20,7 +20,9 @@ __protobuf__ = proto.module( package="google.cloud.aiplatform.v1beta1.schema.predict.prediction", - manifest={"TabularRegressionPredictionResult",}, + manifest={ + "TabularRegressionPredictionResult", + }, ) diff --git a/google/cloud/aiplatform/v1beta1/schema/predict/prediction_v1beta1/types/text_extraction.py b/google/cloud/aiplatform/v1beta1/schema/predict/prediction_v1beta1/types/text_extraction.py index e3c10b5d75..5450db2ffb 100644 --- a/google/cloud/aiplatform/v1beta1/schema/predict/prediction_v1beta1/types/text_extraction.py +++ b/google/cloud/aiplatform/v1beta1/schema/predict/prediction_v1beta1/types/text_extraction.py @@ -20,7 +20,9 @@ __protobuf__ = proto.module( package="google.cloud.aiplatform.v1beta1.schema.predict.prediction", - manifest={"TextExtractionPredictionResult",}, + manifest={ + "TextExtractionPredictionResult", + }, ) diff --git a/google/cloud/aiplatform/v1beta1/schema/predict/prediction_v1beta1/types/text_sentiment.py b/google/cloud/aiplatform/v1beta1/schema/predict/prediction_v1beta1/types/text_sentiment.py index 192e50419d..fcd296366f 100644 --- a/google/cloud/aiplatform/v1beta1/schema/predict/prediction_v1beta1/types/text_sentiment.py +++ b/google/cloud/aiplatform/v1beta1/schema/predict/prediction_v1beta1/types/text_sentiment.py @@ -23,7 +23,9 @@ __protobuf__ = proto.module( package="google.cloud.aiplatform.v1beta1.schema.predict.prediction", - manifest={"TextSentimentPredictionResult",}, + manifest={ + "TextSentimentPredictionResult", + }, ) @@ -62,7 +64,11 @@ class Prediction(proto.Message): message=gcaspi_text_sentiment.TextSentimentPredictionInstance, ) - prediction = proto.Field(proto.MESSAGE, number=2, message=Prediction,) + prediction = proto.Field( + proto.MESSAGE, + number=2, + message=Prediction, + ) __all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/google/cloud/aiplatform/v1beta1/schema/predict/prediction_v1beta1/types/time_series_forecasting.py b/google/cloud/aiplatform/v1beta1/schema/predict/prediction_v1beta1/types/time_series_forecasting.py index 38bd8e3c85..eb30436beb 100644 --- 
a/google/cloud/aiplatform/v1beta1/schema/predict/prediction_v1beta1/types/time_series_forecasting.py +++ b/google/cloud/aiplatform/v1beta1/schema/predict/prediction_v1beta1/types/time_series_forecasting.py @@ -20,7 +20,9 @@ __protobuf__ = proto.module( package="google.cloud.aiplatform.v1beta1.schema.predict.prediction", - manifest={"TimeSeriesForecastingPredictionResult",}, + manifest={ + "TimeSeriesForecastingPredictionResult", + }, ) diff --git a/google/cloud/aiplatform/v1beta1/schema/predict/prediction_v1beta1/types/video_action_recognition.py b/google/cloud/aiplatform/v1beta1/schema/predict/prediction_v1beta1/types/video_action_recognition.py index f76b51899b..8105e21a87 100644 --- a/google/cloud/aiplatform/v1beta1/schema/predict/prediction_v1beta1/types/video_action_recognition.py +++ b/google/cloud/aiplatform/v1beta1/schema/predict/prediction_v1beta1/types/video_action_recognition.py @@ -24,7 +24,9 @@ __protobuf__ = proto.module( package="google.cloud.aiplatform.v1beta1.schema.predict.prediction", - manifest={"VideoActionRecognitionPredictionResult",}, + manifest={ + "VideoActionRecognitionPredictionResult", + }, ) @@ -63,12 +65,22 @@ class VideoActionRecognitionPredictionResult(proto.Message): display_name = proto.Field(proto.STRING, number=2) time_segment_start = proto.Field( - proto.MESSAGE, number=4, message=duration.Duration, + proto.MESSAGE, + number=4, + message=duration.Duration, ) - time_segment_end = proto.Field(proto.MESSAGE, number=5, message=duration.Duration,) + time_segment_end = proto.Field( + proto.MESSAGE, + number=5, + message=duration.Duration, + ) - confidence = proto.Field(proto.MESSAGE, number=6, message=wrappers.FloatValue,) + confidence = proto.Field( + proto.MESSAGE, + number=6, + message=wrappers.FloatValue, + ) __all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/google/cloud/aiplatform/v1beta1/schema/predict/prediction_v1beta1/types/video_classification.py b/google/cloud/aiplatform/v1beta1/schema/predict/prediction_v1beta1/types/video_classification.py index 469023b122..dbee575ef5 100644 --- a/google/cloud/aiplatform/v1beta1/schema/predict/prediction_v1beta1/types/video_classification.py +++ b/google/cloud/aiplatform/v1beta1/schema/predict/prediction_v1beta1/types/video_classification.py @@ -24,7 +24,9 @@ __protobuf__ = proto.module( package="google.cloud.aiplatform.v1beta1.schema.predict.prediction", - manifest={"VideoClassificationPredictionResult",}, + manifest={ + "VideoClassificationPredictionResult", + }, ) @@ -79,12 +81,22 @@ class VideoClassificationPredictionResult(proto.Message): type_ = proto.Field(proto.STRING, number=3) time_segment_start = proto.Field( - proto.MESSAGE, number=4, message=duration.Duration, + proto.MESSAGE, + number=4, + message=duration.Duration, ) - time_segment_end = proto.Field(proto.MESSAGE, number=5, message=duration.Duration,) + time_segment_end = proto.Field( + proto.MESSAGE, + number=5, + message=duration.Duration, + ) - confidence = proto.Field(proto.MESSAGE, number=6, message=wrappers.FloatValue,) + confidence = proto.Field( + proto.MESSAGE, + number=6, + message=wrappers.FloatValue, + ) __all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/google/cloud/aiplatform/v1beta1/schema/predict/prediction_v1beta1/types/video_object_tracking.py b/google/cloud/aiplatform/v1beta1/schema/predict/prediction_v1beta1/types/video_object_tracking.py index 026f80a325..2a05724028 100644 --- a/google/cloud/aiplatform/v1beta1/schema/predict/prediction_v1beta1/types/video_object_tracking.py +++ 
b/google/cloud/aiplatform/v1beta1/schema/predict/prediction_v1beta1/types/video_object_tracking.py @@ -24,7 +24,9 @@ __protobuf__ = proto.module( package="google.cloud.aiplatform.v1beta1.schema.predict.prediction", - manifest={"VideoObjectTrackingPredictionResult",}, + manifest={ + "VideoObjectTrackingPredictionResult", + }, ) @@ -87,29 +89,63 @@ class Frame(proto.Message): box. """ - time_offset = proto.Field(proto.MESSAGE, number=1, message=duration.Duration,) - - x_min = proto.Field(proto.MESSAGE, number=2, message=wrappers.FloatValue,) - - x_max = proto.Field(proto.MESSAGE, number=3, message=wrappers.FloatValue,) - - y_min = proto.Field(proto.MESSAGE, number=4, message=wrappers.FloatValue,) - - y_max = proto.Field(proto.MESSAGE, number=5, message=wrappers.FloatValue,) + time_offset = proto.Field( + proto.MESSAGE, + number=1, + message=duration.Duration, + ) + + x_min = proto.Field( + proto.MESSAGE, + number=2, + message=wrappers.FloatValue, + ) + + x_max = proto.Field( + proto.MESSAGE, + number=3, + message=wrappers.FloatValue, + ) + + y_min = proto.Field( + proto.MESSAGE, + number=4, + message=wrappers.FloatValue, + ) + + y_max = proto.Field( + proto.MESSAGE, + number=5, + message=wrappers.FloatValue, + ) id = proto.Field(proto.STRING, number=1) display_name = proto.Field(proto.STRING, number=2) time_segment_start = proto.Field( - proto.MESSAGE, number=3, message=duration.Duration, + proto.MESSAGE, + number=3, + message=duration.Duration, ) - time_segment_end = proto.Field(proto.MESSAGE, number=4, message=duration.Duration,) + time_segment_end = proto.Field( + proto.MESSAGE, + number=4, + message=duration.Duration, + ) - confidence = proto.Field(proto.MESSAGE, number=5, message=wrappers.FloatValue,) + confidence = proto.Field( + proto.MESSAGE, + number=5, + message=wrappers.FloatValue, + ) - frames = proto.RepeatedField(proto.MESSAGE, number=6, message=Frame,) + frames = proto.RepeatedField( + proto.MESSAGE, + number=6, + message=Frame, + ) __all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/google/cloud/aiplatform/v1beta1/schema/trainingjob/definition_v1beta1/types/automl_forecasting.py b/google/cloud/aiplatform/v1beta1/schema/trainingjob/definition_v1beta1/types/automl_forecasting.py index 40c549dc5f..337138d774 100644 --- a/google/cloud/aiplatform/v1beta1/schema/trainingjob/definition_v1beta1/types/automl_forecasting.py +++ b/google/cloud/aiplatform/v1beta1/schema/trainingjob/definition_v1beta1/types/automl_forecasting.py @@ -44,10 +44,16 @@ class AutoMlForecasting(proto.Message): The metadata information. 
""" - inputs = proto.Field(proto.MESSAGE, number=1, message="AutoMlForecastingInputs",) + inputs = proto.Field( + proto.MESSAGE, + number=1, + message="AutoMlForecastingInputs", + ) metadata = proto.Field( - proto.MESSAGE, number=2, message="AutoMlForecastingMetadata", + proto.MESSAGE, + number=2, + message="AutoMlForecastingMetadata", ) @@ -439,7 +445,9 @@ class Period(proto.Message): time_column = proto.Field(proto.STRING, number=3) transformations = proto.RepeatedField( - proto.MESSAGE, number=4, message=Transformation, + proto.MESSAGE, + number=4, + message=Transformation, ) optimization_objective = proto.Field(proto.STRING, number=5) @@ -454,7 +462,11 @@ class Period(proto.Message): time_variant_past_and_future_columns = proto.RepeatedField(proto.STRING, number=10) - period = proto.Field(proto.MESSAGE, number=11, message=Period,) + period = proto.Field( + proto.MESSAGE, + number=11, + message=Period, + ) forecast_window_start = proto.Field(proto.INT64, number=12) diff --git a/google/cloud/aiplatform/v1beta1/schema/trainingjob/definition_v1beta1/types/automl_image_classification.py b/google/cloud/aiplatform/v1beta1/schema/trainingjob/definition_v1beta1/types/automl_image_classification.py index 0ee0394192..57fb8fd17c 100644 --- a/google/cloud/aiplatform/v1beta1/schema/trainingjob/definition_v1beta1/types/automl_image_classification.py +++ b/google/cloud/aiplatform/v1beta1/schema/trainingjob/definition_v1beta1/types/automl_image_classification.py @@ -40,11 +40,15 @@ class AutoMlImageClassification(proto.Message): """ inputs = proto.Field( - proto.MESSAGE, number=1, message="AutoMlImageClassificationInputs", + proto.MESSAGE, + number=1, + message="AutoMlImageClassificationInputs", ) metadata = proto.Field( - proto.MESSAGE, number=2, message="AutoMlImageClassificationMetadata", + proto.MESSAGE, + number=2, + message="AutoMlImageClassificationMetadata", ) @@ -101,7 +105,11 @@ class ModelType(proto.Enum): MOBILE_TF_VERSATILE_1 = 3 MOBILE_TF_HIGH_ACCURACY_1 = 4 - model_type = proto.Field(proto.ENUM, number=1, enum=ModelType,) + model_type = proto.Field( + proto.ENUM, + number=1, + enum=ModelType, + ) base_model_id = proto.Field(proto.STRING, number=2) @@ -136,7 +144,9 @@ class SuccessfulStopReason(proto.Enum): cost_milli_node_hours = proto.Field(proto.INT64, number=1) successful_stop_reason = proto.Field( - proto.ENUM, number=2, enum=SuccessfulStopReason, + proto.ENUM, + number=2, + enum=SuccessfulStopReason, ) diff --git a/google/cloud/aiplatform/v1beta1/schema/trainingjob/definition_v1beta1/types/automl_image_object_detection.py b/google/cloud/aiplatform/v1beta1/schema/trainingjob/definition_v1beta1/types/automl_image_object_detection.py index 3fb9d3ae1d..420e4a4a31 100644 --- a/google/cloud/aiplatform/v1beta1/schema/trainingjob/definition_v1beta1/types/automl_image_object_detection.py +++ b/google/cloud/aiplatform/v1beta1/schema/trainingjob/definition_v1beta1/types/automl_image_object_detection.py @@ -40,11 +40,15 @@ class AutoMlImageObjectDetection(proto.Message): """ inputs = proto.Field( - proto.MESSAGE, number=1, message="AutoMlImageObjectDetectionInputs", + proto.MESSAGE, + number=1, + message="AutoMlImageObjectDetectionInputs", ) metadata = proto.Field( - proto.MESSAGE, number=2, message="AutoMlImageObjectDetectionMetadata", + proto.MESSAGE, + number=2, + message="AutoMlImageObjectDetectionMetadata", ) @@ -90,7 +94,11 @@ class ModelType(proto.Enum): MOBILE_TF_VERSATILE_1 = 4 MOBILE_TF_HIGH_ACCURACY_1 = 5 - model_type = proto.Field(proto.ENUM, number=1, enum=ModelType,) + model_type 
= proto.Field( + proto.ENUM, + number=1, + enum=ModelType, + ) budget_milli_node_hours = proto.Field(proto.INT64, number=2) @@ -121,7 +129,9 @@ class SuccessfulStopReason(proto.Enum): cost_milli_node_hours = proto.Field(proto.INT64, number=1) successful_stop_reason = proto.Field( - proto.ENUM, number=2, enum=SuccessfulStopReason, + proto.ENUM, + number=2, + enum=SuccessfulStopReason, ) diff --git a/google/cloud/aiplatform/v1beta1/schema/trainingjob/definition_v1beta1/types/automl_image_segmentation.py b/google/cloud/aiplatform/v1beta1/schema/trainingjob/definition_v1beta1/types/automl_image_segmentation.py index 0fa3788b11..c767f4272b 100644 --- a/google/cloud/aiplatform/v1beta1/schema/trainingjob/definition_v1beta1/types/automl_image_segmentation.py +++ b/google/cloud/aiplatform/v1beta1/schema/trainingjob/definition_v1beta1/types/automl_image_segmentation.py @@ -40,11 +40,15 @@ class AutoMlImageSegmentation(proto.Message): """ inputs = proto.Field( - proto.MESSAGE, number=1, message="AutoMlImageSegmentationInputs", + proto.MESSAGE, + number=1, + message="AutoMlImageSegmentationInputs", ) metadata = proto.Field( - proto.MESSAGE, number=2, message="AutoMlImageSegmentationMetadata", + proto.MESSAGE, + number=2, + message="AutoMlImageSegmentationMetadata", ) @@ -83,7 +87,11 @@ class ModelType(proto.Enum): CLOUD_HIGH_ACCURACY_1 = 1 CLOUD_LOW_ACCURACY_1 = 2 - model_type = proto.Field(proto.ENUM, number=1, enum=ModelType,) + model_type = proto.Field( + proto.ENUM, + number=1, + enum=ModelType, + ) budget_milli_node_hours = proto.Field(proto.INT64, number=2) @@ -114,7 +122,9 @@ class SuccessfulStopReason(proto.Enum): cost_milli_node_hours = proto.Field(proto.INT64, number=1) successful_stop_reason = proto.Field( - proto.ENUM, number=2, enum=SuccessfulStopReason, + proto.ENUM, + number=2, + enum=SuccessfulStopReason, ) diff --git a/google/cloud/aiplatform/v1beta1/schema/trainingjob/definition_v1beta1/types/automl_tables.py b/google/cloud/aiplatform/v1beta1/schema/trainingjob/definition_v1beta1/types/automl_tables.py index 55d620b32e..362b3613fd 100644 --- a/google/cloud/aiplatform/v1beta1/schema/trainingjob/definition_v1beta1/types/automl_tables.py +++ b/google/cloud/aiplatform/v1beta1/schema/trainingjob/definition_v1beta1/types/automl_tables.py @@ -25,7 +25,11 @@ __protobuf__ = proto.module( package="google.cloud.aiplatform.v1beta1.schema.trainingjob.definition", - manifest={"AutoMlTables", "AutoMlTablesInputs", "AutoMlTablesMetadata",}, + manifest={ + "AutoMlTables", + "AutoMlTablesInputs", + "AutoMlTablesMetadata", + }, ) @@ -39,9 +43,17 @@ class AutoMlTables(proto.Message): The metadata information. 
""" - inputs = proto.Field(proto.MESSAGE, number=1, message="AutoMlTablesInputs",) + inputs = proto.Field( + proto.MESSAGE, + number=1, + message="AutoMlTablesInputs", + ) - metadata = proto.Field(proto.MESSAGE, number=2, message="AutoMlTablesMetadata",) + metadata = proto.Field( + proto.MESSAGE, + number=2, + message="AutoMlTablesMetadata", + ) class AutoMlTablesInputs(proto.Message): @@ -412,7 +424,9 @@ class TextArrayTransformation(proto.Message): target_column = proto.Field(proto.STRING, number=2) transformations = proto.RepeatedField( - proto.MESSAGE, number=3, message=Transformation, + proto.MESSAGE, + number=3, + message=Transformation, ) optimization_objective = proto.Field(proto.STRING, number=4) diff --git a/google/cloud/aiplatform/v1beta1/schema/trainingjob/definition_v1beta1/types/automl_text_classification.py b/google/cloud/aiplatform/v1beta1/schema/trainingjob/definition_v1beta1/types/automl_text_classification.py index ca75734600..8b7c29d198 100644 --- a/google/cloud/aiplatform/v1beta1/schema/trainingjob/definition_v1beta1/types/automl_text_classification.py +++ b/google/cloud/aiplatform/v1beta1/schema/trainingjob/definition_v1beta1/types/automl_text_classification.py @@ -20,7 +20,10 @@ __protobuf__ = proto.module( package="google.cloud.aiplatform.v1beta1.schema.trainingjob.definition", - manifest={"AutoMlTextClassification", "AutoMlTextClassificationInputs",}, + manifest={ + "AutoMlTextClassification", + "AutoMlTextClassificationInputs", + }, ) @@ -34,7 +37,9 @@ class AutoMlTextClassification(proto.Message): """ inputs = proto.Field( - proto.MESSAGE, number=1, message="AutoMlTextClassificationInputs", + proto.MESSAGE, + number=1, + message="AutoMlTextClassificationInputs", ) diff --git a/google/cloud/aiplatform/v1beta1/schema/trainingjob/definition_v1beta1/types/automl_text_extraction.py b/google/cloud/aiplatform/v1beta1/schema/trainingjob/definition_v1beta1/types/automl_text_extraction.py index 336509af22..c1e44e4630 100644 --- a/google/cloud/aiplatform/v1beta1/schema/trainingjob/definition_v1beta1/types/automl_text_extraction.py +++ b/google/cloud/aiplatform/v1beta1/schema/trainingjob/definition_v1beta1/types/automl_text_extraction.py @@ -20,7 +20,10 @@ __protobuf__ = proto.module( package="google.cloud.aiplatform.v1beta1.schema.trainingjob.definition", - manifest={"AutoMlTextExtraction", "AutoMlTextExtractionInputs",}, + manifest={ + "AutoMlTextExtraction", + "AutoMlTextExtractionInputs", + }, ) @@ -33,7 +36,11 @@ class AutoMlTextExtraction(proto.Message): The input parameters of this TrainingJob. 
""" - inputs = proto.Field(proto.MESSAGE, number=1, message="AutoMlTextExtractionInputs",) + inputs = proto.Field( + proto.MESSAGE, + number=1, + message="AutoMlTextExtractionInputs", + ) class AutoMlTextExtractionInputs(proto.Message): diff --git a/google/cloud/aiplatform/v1beta1/schema/trainingjob/definition_v1beta1/types/automl_text_sentiment.py b/google/cloud/aiplatform/v1beta1/schema/trainingjob/definition_v1beta1/types/automl_text_sentiment.py index d5de97e2b2..d1b936a361 100644 --- a/google/cloud/aiplatform/v1beta1/schema/trainingjob/definition_v1beta1/types/automl_text_sentiment.py +++ b/google/cloud/aiplatform/v1beta1/schema/trainingjob/definition_v1beta1/types/automl_text_sentiment.py @@ -20,7 +20,10 @@ __protobuf__ = proto.module( package="google.cloud.aiplatform.v1beta1.schema.trainingjob.definition", - manifest={"AutoMlTextSentiment", "AutoMlTextSentimentInputs",}, + manifest={ + "AutoMlTextSentiment", + "AutoMlTextSentimentInputs", + }, ) @@ -33,7 +36,11 @@ class AutoMlTextSentiment(proto.Message): The input parameters of this TrainingJob. """ - inputs = proto.Field(proto.MESSAGE, number=1, message="AutoMlTextSentimentInputs",) + inputs = proto.Field( + proto.MESSAGE, + number=1, + message="AutoMlTextSentimentInputs", + ) class AutoMlTextSentimentInputs(proto.Message): diff --git a/google/cloud/aiplatform/v1beta1/schema/trainingjob/definition_v1beta1/types/automl_video_action_recognition.py b/google/cloud/aiplatform/v1beta1/schema/trainingjob/definition_v1beta1/types/automl_video_action_recognition.py index d6969d93c6..0c5ae5f629 100644 --- a/google/cloud/aiplatform/v1beta1/schema/trainingjob/definition_v1beta1/types/automl_video_action_recognition.py +++ b/google/cloud/aiplatform/v1beta1/schema/trainingjob/definition_v1beta1/types/automl_video_action_recognition.py @@ -20,7 +20,10 @@ __protobuf__ = proto.module( package="google.cloud.aiplatform.v1beta1.schema.trainingjob.definition", - manifest={"AutoMlVideoActionRecognition", "AutoMlVideoActionRecognitionInputs",}, + manifest={ + "AutoMlVideoActionRecognition", + "AutoMlVideoActionRecognitionInputs", + }, ) @@ -34,7 +37,9 @@ class AutoMlVideoActionRecognition(proto.Message): """ inputs = proto.Field( - proto.MESSAGE, number=1, message="AutoMlVideoActionRecognitionInputs", + proto.MESSAGE, + number=1, + message="AutoMlVideoActionRecognitionInputs", ) @@ -52,7 +57,11 @@ class ModelType(proto.Enum): CLOUD = 1 MOBILE_VERSATILE_1 = 2 - model_type = proto.Field(proto.ENUM, number=1, enum=ModelType,) + model_type = proto.Field( + proto.ENUM, + number=1, + enum=ModelType, + ) __all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/google/cloud/aiplatform/v1beta1/schema/trainingjob/definition_v1beta1/types/automl_video_classification.py b/google/cloud/aiplatform/v1beta1/schema/trainingjob/definition_v1beta1/types/automl_video_classification.py index 3164544d47..4e06caf015 100644 --- a/google/cloud/aiplatform/v1beta1/schema/trainingjob/definition_v1beta1/types/automl_video_classification.py +++ b/google/cloud/aiplatform/v1beta1/schema/trainingjob/definition_v1beta1/types/automl_video_classification.py @@ -20,7 +20,10 @@ __protobuf__ = proto.module( package="google.cloud.aiplatform.v1beta1.schema.trainingjob.definition", - manifest={"AutoMlVideoClassification", "AutoMlVideoClassificationInputs",}, + manifest={ + "AutoMlVideoClassification", + "AutoMlVideoClassificationInputs", + }, ) @@ -34,7 +37,9 @@ class AutoMlVideoClassification(proto.Message): """ inputs = proto.Field( - proto.MESSAGE, number=1, 
message="AutoMlVideoClassificationInputs", + proto.MESSAGE, + number=1, + message="AutoMlVideoClassificationInputs", ) @@ -52,7 +57,11 @@ class ModelType(proto.Enum): CLOUD = 1 MOBILE_VERSATILE_1 = 2 - model_type = proto.Field(proto.ENUM, number=1, enum=ModelType,) + model_type = proto.Field( + proto.ENUM, + number=1, + enum=ModelType, + ) __all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/google/cloud/aiplatform/v1beta1/schema/trainingjob/definition_v1beta1/types/automl_video_object_tracking.py b/google/cloud/aiplatform/v1beta1/schema/trainingjob/definition_v1beta1/types/automl_video_object_tracking.py index 0fd8c7ec7a..e351db59d3 100644 --- a/google/cloud/aiplatform/v1beta1/schema/trainingjob/definition_v1beta1/types/automl_video_object_tracking.py +++ b/google/cloud/aiplatform/v1beta1/schema/trainingjob/definition_v1beta1/types/automl_video_object_tracking.py @@ -20,7 +20,10 @@ __protobuf__ = proto.module( package="google.cloud.aiplatform.v1beta1.schema.trainingjob.definition", - manifest={"AutoMlVideoObjectTracking", "AutoMlVideoObjectTrackingInputs",}, + manifest={ + "AutoMlVideoObjectTracking", + "AutoMlVideoObjectTrackingInputs", + }, ) @@ -34,7 +37,9 @@ class AutoMlVideoObjectTracking(proto.Message): """ inputs = proto.Field( - proto.MESSAGE, number=1, message="AutoMlVideoObjectTrackingInputs", + proto.MESSAGE, + number=1, + message="AutoMlVideoObjectTrackingInputs", ) @@ -56,7 +61,11 @@ class ModelType(proto.Enum): MOBILE_JETSON_VERSATILE_1 = 5 MOBILE_JETSON_LOW_LATENCY_1 = 6 - model_type = proto.Field(proto.ENUM, number=1, enum=ModelType,) + model_type = proto.Field( + proto.ENUM, + number=1, + enum=ModelType, + ) __all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/google/cloud/aiplatform/v1beta1/schema/trainingjob/definition_v1beta1/types/export_evaluated_data_items_config.py b/google/cloud/aiplatform/v1beta1/schema/trainingjob/definition_v1beta1/types/export_evaluated_data_items_config.py index 29bc547adf..4d8070c737 100644 --- a/google/cloud/aiplatform/v1beta1/schema/trainingjob/definition_v1beta1/types/export_evaluated_data_items_config.py +++ b/google/cloud/aiplatform/v1beta1/schema/trainingjob/definition_v1beta1/types/export_evaluated_data_items_config.py @@ -20,7 +20,9 @@ __protobuf__ = proto.module( package="google.cloud.aiplatform.v1beta1.schema.trainingjob.definition", - manifest={"ExportEvaluatedDataItemsConfig",}, + manifest={ + "ExportEvaluatedDataItemsConfig", + }, ) diff --git a/google/cloud/aiplatform_v1beta1/services/dataset_service/async_client.py b/google/cloud/aiplatform_v1beta1/services/dataset_service/async_client.py index 1927709f30..d0139b3003 100644 --- a/google/cloud/aiplatform_v1beta1/services/dataset_service/async_client.py +++ b/google/cloud/aiplatform_v1beta1/services/dataset_service/async_client.py @@ -239,7 +239,12 @@ async def create_dataset( ) # Send the request. - response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) # Wrap the response in an operation future. response = operation_async.from_gapic( @@ -319,7 +324,12 @@ async def get_dataset( ) # Send the request. - response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) # Done; return the response. return response @@ -409,7 +419,12 @@ async def update_dataset( ) # Send the request. 
- response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) # Done; return the response. return response @@ -484,12 +499,20 @@ async def list_datasets( ) # Send the request. - response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) # This method is paged; wrap the response in a pager, which provides # an `__aiter__` convenience method. response = pagers.ListDatasetsAsyncPager( - method=rpc, request=request, response=response, metadata=metadata, + method=rpc, + request=request, + response=response, + metadata=metadata, ) # Done; return the response. @@ -578,7 +601,12 @@ async def delete_dataset( ) # Send the request. - response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) # Wrap the response in an operation future. response = operation_async.from_gapic( @@ -673,7 +701,12 @@ async def import_data( ) # Send the request. - response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) # Wrap the response in an operation future. response = operation_async.from_gapic( @@ -766,7 +799,12 @@ async def export_data( ) # Send the request. - response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) # Wrap the response in an operation future. response = operation_async.from_gapic( @@ -850,12 +888,20 @@ async def list_data_items( ) # Send the request. - response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) # This method is paged; wrap the response in a pager, which provides # an `__aiter__` convenience method. response = pagers.ListDataItemsAsyncPager( - method=rpc, request=request, response=response, metadata=metadata, + method=rpc, + request=request, + response=response, + metadata=metadata, ) # Done; return the response. @@ -930,7 +976,12 @@ async def get_annotation_spec( ) # Send the request. - response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) # Done; return the response. return response @@ -1007,12 +1058,20 @@ async def list_annotations( ) # Send the request. - response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) # This method is paged; wrap the response in a pager, which provides # an `__aiter__` convenience method. response = pagers.ListAnnotationsAsyncPager( - method=rpc, request=request, response=response, metadata=metadata, + method=rpc, + request=request, + response=response, + metadata=metadata, ) # Done; return the response. 
diff --git a/google/cloud/aiplatform_v1beta1/services/dataset_service/client.py b/google/cloud/aiplatform_v1beta1/services/dataset_service/client.py index 1e63153291..b97aa5385c 100644 --- a/google/cloud/aiplatform_v1beta1/services/dataset_service/client.py +++ b/google/cloud/aiplatform_v1beta1/services/dataset_service/client.py @@ -66,7 +66,10 @@ class DatasetServiceClientMeta(type): _transport_registry["grpc"] = DatasetServiceGrpcTransport _transport_registry["grpc_asyncio"] = DatasetServiceGrpcAsyncIOTransport - def get_transport_class(cls, label: str = None,) -> Type[DatasetServiceTransport]: + def get_transport_class( + cls, + label: str = None, + ) -> Type[DatasetServiceTransport]: """Return an appropriate transport class. Args: @@ -153,7 +156,11 @@ def transport(self) -> DatasetServiceTransport: @staticmethod def annotation_path( - project: str, location: str, dataset: str, data_item: str, annotation: str, + project: str, + location: str, + dataset: str, + data_item: str, + annotation: str, ) -> str: """Return a fully-qualified annotation string.""" return "projects/{project}/locations/{location}/datasets/{dataset}/dataItems/{data_item}/annotations/{annotation}".format( @@ -175,7 +182,10 @@ def parse_annotation_path(path: str) -> Dict[str, str]: @staticmethod def annotation_spec_path( - project: str, location: str, dataset: str, annotation_spec: str, + project: str, + location: str, + dataset: str, + annotation_spec: str, ) -> str: """Return a fully-qualified annotation_spec string.""" return "projects/{project}/locations/{location}/datasets/{dataset}/annotationSpecs/{annotation_spec}".format( @@ -196,11 +206,17 @@ def parse_annotation_spec_path(path: str) -> Dict[str, str]: @staticmethod def data_item_path( - project: str, location: str, dataset: str, data_item: str, + project: str, + location: str, + dataset: str, + data_item: str, ) -> str: """Return a fully-qualified data_item string.""" return "projects/{project}/locations/{location}/datasets/{dataset}/dataItems/{data_item}".format( - project=project, location=location, dataset=dataset, data_item=data_item, + project=project, + location=location, + dataset=dataset, + data_item=data_item, ) @staticmethod @@ -213,10 +229,16 @@ def parse_data_item_path(path: str) -> Dict[str, str]: return m.groupdict() if m else {} @staticmethod - def dataset_path(project: str, location: str, dataset: str,) -> str: + def dataset_path( + project: str, + location: str, + dataset: str, + ) -> str: """Return a fully-qualified dataset string.""" return "projects/{project}/locations/{location}/datasets/{dataset}".format( - project=project, location=location, dataset=dataset, + project=project, + location=location, + dataset=dataset, ) @staticmethod @@ -229,7 +251,9 @@ def parse_dataset_path(path: str) -> Dict[str, str]: return m.groupdict() if m else {} @staticmethod - def common_billing_account_path(billing_account: str,) -> str: + def common_billing_account_path( + billing_account: str, + ) -> str: """Return a fully-qualified billing_account string.""" return "billingAccounts/{billing_account}".format( billing_account=billing_account, @@ -242,9 +266,13 @@ def parse_common_billing_account_path(path: str) -> Dict[str, str]: return m.groupdict() if m else {} @staticmethod - def common_folder_path(folder: str,) -> str: + def common_folder_path( + folder: str, + ) -> str: """Return a fully-qualified folder string.""" - return "folders/{folder}".format(folder=folder,) + return "folders/{folder}".format( + folder=folder, + ) @staticmethod def 
parse_common_folder_path(path: str) -> Dict[str, str]: @@ -253,9 +281,13 @@ def parse_common_folder_path(path: str) -> Dict[str, str]: return m.groupdict() if m else {} @staticmethod - def common_organization_path(organization: str,) -> str: + def common_organization_path( + organization: str, + ) -> str: """Return a fully-qualified organization string.""" - return "organizations/{organization}".format(organization=organization,) + return "organizations/{organization}".format( + organization=organization, + ) @staticmethod def parse_common_organization_path(path: str) -> Dict[str, str]: @@ -264,9 +296,13 @@ def parse_common_organization_path(path: str) -> Dict[str, str]: return m.groupdict() if m else {} @staticmethod - def common_project_path(project: str,) -> str: + def common_project_path( + project: str, + ) -> str: """Return a fully-qualified project string.""" - return "projects/{project}".format(project=project,) + return "projects/{project}".format( + project=project, + ) @staticmethod def parse_common_project_path(path: str) -> Dict[str, str]: @@ -275,10 +311,14 @@ def parse_common_project_path(path: str) -> Dict[str, str]: return m.groupdict() if m else {} @staticmethod - def common_location_path(project: str, location: str,) -> str: + def common_location_path( + project: str, + location: str, + ) -> str: """Return a fully-qualified location string.""" return "projects/{project}/locations/{location}".format( - project=project, location=location, + project=project, + location=location, ) @staticmethod @@ -484,7 +524,12 @@ def create_dataset( ) # Send the request. - response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) # Wrap the response in an operation future. response = ga_operation.from_gapic( @@ -565,7 +610,12 @@ def get_dataset( ) # Send the request. - response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) # Done; return the response. return response @@ -656,7 +706,12 @@ def update_dataset( ) # Send the request. - response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) # Done; return the response. return response @@ -732,12 +787,20 @@ def list_datasets( ) # Send the request. - response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) # This method is paged; wrap the response in a pager, which provides # an `__iter__` convenience method. response = pagers.ListDatasetsPager( - method=rpc, request=request, response=response, metadata=metadata, + method=rpc, + request=request, + response=response, + metadata=metadata, ) # Done; return the response. @@ -827,7 +890,12 @@ def delete_dataset( ) # Send the request. - response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) # Wrap the response in an operation future. response = ga_operation.from_gapic( @@ -923,7 +991,12 @@ def import_data( ) # Send the request. - response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) # Wrap the response in an operation future. 
response = ga_operation.from_gapic( @@ -1017,7 +1090,12 @@ def export_data( ) # Send the request. - response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) # Wrap the response in an operation future. response = ga_operation.from_gapic( @@ -1102,12 +1180,20 @@ def list_data_items( ) # Send the request. - response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) # This method is paged; wrap the response in a pager, which provides # an `__iter__` convenience method. response = pagers.ListDataItemsPager( - method=rpc, request=request, response=response, metadata=metadata, + method=rpc, + request=request, + response=response, + metadata=metadata, ) # Done; return the response. @@ -1183,7 +1269,12 @@ def get_annotation_spec( ) # Send the request. - response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) # Done; return the response. return response @@ -1261,12 +1352,20 @@ def list_annotations( ) # Send the request. - response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) # This method is paged; wrap the response in a pager, which provides # an `__iter__` convenience method. response = pagers.ListAnnotationsPager( - method=rpc, request=request, response=response, metadata=metadata, + method=rpc, + request=request, + response=response, + metadata=metadata, ) # Done; return the response. diff --git a/google/cloud/aiplatform_v1beta1/services/dataset_service/transports/grpc.py b/google/cloud/aiplatform_v1beta1/services/dataset_service/transports/grpc.py index 2647c4bd9c..7120c2eb9a 100644 --- a/google/cloud/aiplatform_v1beta1/services/dataset_service/transports/grpc.py +++ b/google/cloud/aiplatform_v1beta1/services/dataset_service/transports/grpc.py @@ -228,8 +228,7 @@ def create_channel( @property def grpc_channel(self) -> grpc.Channel: - """Return the channel designed to connect to this service. - """ + """Return the channel designed to connect to this service.""" return self._grpc_channel @property diff --git a/google/cloud/aiplatform_v1beta1/services/endpoint_service/async_client.py b/google/cloud/aiplatform_v1beta1/services/endpoint_service/async_client.py index 9c6af3bd16..3afd01ea0c 100644 --- a/google/cloud/aiplatform_v1beta1/services/endpoint_service/async_client.py +++ b/google/cloud/aiplatform_v1beta1/services/endpoint_service/async_client.py @@ -230,7 +230,12 @@ async def create_endpoint( ) # Send the request. - response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) # Wrap the response in an operation future. response = operation_async.from_gapic( @@ -311,7 +316,12 @@ async def get_endpoint( ) # Send the request. - response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) # Done; return the response. return response @@ -387,12 +397,20 @@ async def list_endpoints( ) # Send the request. 
- response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) # This method is paged; wrap the response in a pager, which provides # an `__aiter__` convenience method. response = pagers.ListEndpointsAsyncPager( - method=rpc, request=request, response=response, metadata=metadata, + method=rpc, + request=request, + response=response, + metadata=metadata, ) # Done; return the response. @@ -477,7 +495,12 @@ async def update_endpoint( ) # Send the request. - response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) # Done; return the response. return response @@ -565,7 +588,12 @@ async def delete_endpoint( ) # Send the request. - response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) # Wrap the response in an operation future. response = operation_async.from_gapic( @@ -689,7 +717,12 @@ async def deploy_model( ) # Send the request. - response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) # Wrap the response in an operation future. response = operation_async.from_gapic( @@ -804,7 +837,12 @@ async def undeploy_model( ) # Send the request. - response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) # Wrap the response in an operation future. response = operation_async.from_gapic( diff --git a/google/cloud/aiplatform_v1beta1/services/endpoint_service/client.py b/google/cloud/aiplatform_v1beta1/services/endpoint_service/client.py index 5ea003b827..28a8f6ab78 100644 --- a/google/cloud/aiplatform_v1beta1/services/endpoint_service/client.py +++ b/google/cloud/aiplatform_v1beta1/services/endpoint_service/client.py @@ -62,7 +62,10 @@ class EndpointServiceClientMeta(type): _transport_registry["grpc"] = EndpointServiceGrpcTransport _transport_registry["grpc_asyncio"] = EndpointServiceGrpcAsyncIOTransport - def get_transport_class(cls, label: str = None,) -> Type[EndpointServiceTransport]: + def get_transport_class( + cls, + label: str = None, + ) -> Type[EndpointServiceTransport]: """Return an appropriate transport class. 
 
         Args:
@@ -148,10 +151,16 @@ def transport(self) -> EndpointServiceTransport:
         return self._transport
 
     @staticmethod
-    def endpoint_path(project: str, location: str, endpoint: str,) -> str:
+    def endpoint_path(
+        project: str,
+        location: str,
+        endpoint: str,
+    ) -> str:
         """Return a fully-qualified endpoint string."""
         return "projects/{project}/locations/{location}/endpoints/{endpoint}".format(
-            project=project, location=location, endpoint=endpoint,
+            project=project,
+            location=location,
+            endpoint=endpoint,
         )
 
     @staticmethod
@@ -164,10 +173,16 @@ def parse_endpoint_path(path: str) -> Dict[str, str]:
         return m.groupdict() if m else {}
 
     @staticmethod
-    def model_path(project: str, location: str, model: str,) -> str:
+    def model_path(
+        project: str,
+        location: str,
+        model: str,
+    ) -> str:
         """Return a fully-qualified model string."""
         return "projects/{project}/locations/{location}/models/{model}".format(
-            project=project, location=location, model=model,
+            project=project,
+            location=location,
+            model=model,
         )
 
     @staticmethod
@@ -180,7 +195,9 @@ def parse_model_path(path: str) -> Dict[str, str]:
         return m.groupdict() if m else {}
 
     @staticmethod
-    def common_billing_account_path(billing_account: str,) -> str:
+    def common_billing_account_path(
+        billing_account: str,
+    ) -> str:
         """Return a fully-qualified billing_account string."""
         return "billingAccounts/{billing_account}".format(
             billing_account=billing_account,
@@ -193,9 +210,13 @@ def parse_common_billing_account_path(path: str) -> Dict[str, str]:
         return m.groupdict() if m else {}
 
     @staticmethod
-    def common_folder_path(folder: str,) -> str:
+    def common_folder_path(
+        folder: str,
+    ) -> str:
         """Return a fully-qualified folder string."""
-        return "folders/{folder}".format(folder=folder,)
+        return "folders/{folder}".format(
+            folder=folder,
+        )
 
     @staticmethod
     def parse_common_folder_path(path: str) -> Dict[str, str]:
@@ -204,9 +225,13 @@ def parse_common_folder_path(path: str) -> Dict[str, str]:
         return m.groupdict() if m else {}
 
     @staticmethod
-    def common_organization_path(organization: str,) -> str:
+    def common_organization_path(
+        organization: str,
+    ) -> str:
         """Return a fully-qualified organization string."""
-        return "organizations/{organization}".format(organization=organization,)
+        return "organizations/{organization}".format(
+            organization=organization,
+        )
 
     @staticmethod
     def parse_common_organization_path(path: str) -> Dict[str, str]:
@@ -215,9 +240,13 @@ def parse_common_organization_path(path: str) -> Dict[str, str]:
         return m.groupdict() if m else {}
 
     @staticmethod
-    def common_project_path(project: str,) -> str:
+    def common_project_path(
+        project: str,
+    ) -> str:
         """Return a fully-qualified project string."""
-        return "projects/{project}".format(project=project,)
+        return "projects/{project}".format(
+            project=project,
+        )
 
     @staticmethod
     def parse_common_project_path(path: str) -> Dict[str, str]:
@@ -226,10 +255,14 @@ def parse_common_project_path(path: str) -> Dict[str, str]:
         return m.groupdict() if m else {}
 
     @staticmethod
-    def common_location_path(project: str, location: str,) -> str:
+    def common_location_path(
+        project: str,
+        location: str,
+    ) -> str:
         """Return a fully-qualified location string."""
         return "projects/{project}/locations/{location}".format(
-            project=project, location=location,
+            project=project,
+            location=location,
         )
 
     @staticmethod
@@ -436,7 +469,12 @@ def create_endpoint(
         )
 
         # Send the request.
-        response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,)
+        response = rpc(
+            request,
+            retry=retry,
+            timeout=timeout,
+            metadata=metadata,
+        )
 
         # Wrap the response in an operation future.
         response = ga_operation.from_gapic(
@@ -518,7 +556,12 @@ def get_endpoint(
         )
 
         # Send the request.
-        response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,)
+        response = rpc(
+            request,
+            retry=retry,
+            timeout=timeout,
+            metadata=metadata,
+        )
 
         # Done; return the response.
         return response
@@ -595,12 +638,20 @@ def list_endpoints(
         )
 
         # Send the request.
-        response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,)
+        response = rpc(
+            request,
+            retry=retry,
+            timeout=timeout,
+            metadata=metadata,
+        )
 
         # This method is paged; wrap the response in a pager, which provides
         # an `__iter__` convenience method.
         response = pagers.ListEndpointsPager(
-            method=rpc, request=request, response=response, metadata=metadata,
+            method=rpc,
+            request=request,
+            response=response,
+            metadata=metadata,
         )
 
         # Done; return the response.
@@ -686,7 +737,12 @@ def update_endpoint(
         )
 
         # Send the request.
-        response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,)
+        response = rpc(
+            request,
+            retry=retry,
+            timeout=timeout,
+            metadata=metadata,
+        )
 
         # Done; return the response.
         return response
@@ -775,7 +831,12 @@ def delete_endpoint(
         )
 
         # Send the request.
-        response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,)
+        response = rpc(
+            request,
+            retry=retry,
+            timeout=timeout,
+            metadata=metadata,
+        )
 
         # Wrap the response in an operation future.
         response = ga_operation.from_gapic(
@@ -900,7 +961,12 @@ def deploy_model(
         )
 
         # Send the request.
-        response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,)
+        response = rpc(
+            request,
+            retry=retry,
+            timeout=timeout,
+            metadata=metadata,
+        )
 
         # Wrap the response in an operation future.
         response = ga_operation.from_gapic(
@@ -1016,7 +1082,12 @@ def undeploy_model(
         )
 
         # Send the request.
-        response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,)
+        response = rpc(
+            request,
+            retry=retry,
+            timeout=timeout,
+            metadata=metadata,
+        )
 
         # Wrap the response in an operation future.
         response = ga_operation.from_gapic(
diff --git a/google/cloud/aiplatform_v1beta1/services/endpoint_service/transports/grpc.py b/google/cloud/aiplatform_v1beta1/services/endpoint_service/transports/grpc.py
index 70915facf0..5a2dee4d5a 100644
--- a/google/cloud/aiplatform_v1beta1/services/endpoint_service/transports/grpc.py
+++ b/google/cloud/aiplatform_v1beta1/services/endpoint_service/transports/grpc.py
@@ -227,8 +227,7 @@ def create_channel(
 
     @property
     def grpc_channel(self) -> grpc.Channel:
-        """Return the channel designed to connect to this service.
-        """
+        """Return the channel designed to connect to this service."""
        return self._grpc_channel
 
     @property
diff --git a/google/cloud/aiplatform_v1beta1/services/job_service/async_client.py b/google/cloud/aiplatform_v1beta1/services/job_service/async_client.py
index 2a24748d11..258cd49a51 100644
--- a/google/cloud/aiplatform_v1beta1/services/job_service/async_client.py
+++ b/google/cloud/aiplatform_v1beta1/services/job_service/async_client.py
@@ -263,7 +263,12 @@ async def create_custom_job(
         )
 
         # Send the request.
-        response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,)
+        response = await rpc(
+            request,
+            retry=retry,
+            timeout=timeout,
+            metadata=metadata,
+        )
 
         # Done; return the response.
         return response
@@ -341,7 +346,12 @@ async def get_custom_job(
         )
 
         # Send the request.
-        response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,)
+        response = await rpc(
+            request,
+            retry=retry,
+            timeout=timeout,
+            metadata=metadata,
+        )
 
         # Done; return the response.
         return response
@@ -417,12 +427,20 @@ async def list_custom_jobs(
         )
 
         # Send the request.
-        response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,)
+        response = await rpc(
+            request,
+            retry=retry,
+            timeout=timeout,
+            metadata=metadata,
+        )
 
         # This method is paged; wrap the response in a pager, which provides
         # an `__aiter__` convenience method.
         response = pagers.ListCustomJobsAsyncPager(
-            method=rpc, request=request, response=response, metadata=metadata,
+            method=rpc,
+            request=request,
+            response=response,
+            metadata=metadata,
         )
 
         # Done; return the response.
@@ -511,7 +529,12 @@ async def delete_custom_job(
         )
 
         # Send the request.
-        response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,)
+        response = await rpc(
+            request,
+            retry=retry,
+            timeout=timeout,
+            metadata=metadata,
+        )
 
         # Wrap the response in an operation future.
         response = operation_async.from_gapic(
@@ -598,7 +621,10 @@ async def cancel_custom_job(
 
         # Send the request.
         await rpc(
-            request, retry=retry, timeout=timeout, metadata=metadata,
+            request,
+            retry=retry,
+            timeout=timeout,
+            metadata=metadata,
         )
 
     async def create_data_labeling_job(
@@ -678,7 +704,12 @@ async def create_data_labeling_job(
         )
 
         # Send the request.
-        response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,)
+        response = await rpc(
+            request,
+            retry=retry,
+            timeout=timeout,
+            metadata=metadata,
+        )
 
         # Done; return the response.
         return response
@@ -752,7 +783,12 @@ async def get_data_labeling_job(
         )
 
         # Send the request.
-        response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,)
+        response = await rpc(
+            request,
+            retry=retry,
+            timeout=timeout,
+            metadata=metadata,
+        )
 
         # Done; return the response.
         return response
@@ -827,12 +863,20 @@ async def list_data_labeling_jobs(
         )
 
         # Send the request.
-        response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,)
+        response = await rpc(
+            request,
+            retry=retry,
+            timeout=timeout,
+            metadata=metadata,
+        )
 
         # This method is paged; wrap the response in a pager, which provides
         # an `__aiter__` convenience method.
         response = pagers.ListDataLabelingJobsAsyncPager(
-            method=rpc, request=request, response=response, metadata=metadata,
+            method=rpc,
+            request=request,
+            response=response,
+            metadata=metadata,
         )
 
         # Done; return the response.
@@ -922,7 +966,12 @@ async def delete_data_labeling_job(
         )
 
         # Send the request.
-        response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,)
+        response = await rpc(
+            request,
+            retry=retry,
+            timeout=timeout,
+            metadata=metadata,
+        )
 
         # Wrap the response in an operation future.
         response = operation_async.from_gapic(
@@ -999,7 +1048,10 @@ async def cancel_data_labeling_job(
 
         # Send the request.
         await rpc(
-            request, retry=retry, timeout=timeout, metadata=metadata,
+            request,
+            retry=retry,
+            timeout=timeout,
+            metadata=metadata,
         )
 
     async def create_hyperparameter_tuning_job(
@@ -1081,7 +1133,12 @@ async def create_hyperparameter_tuning_job(
         )
 
         # Send the request.
-        response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,)
+        response = await rpc(
+            request,
+            retry=retry,
+            timeout=timeout,
+            metadata=metadata,
+        )
 
         # Done; return the response.
         return response
@@ -1157,7 +1214,12 @@ async def get_hyperparameter_tuning_job(
         )
 
         # Send the request.
-        response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,)
+        response = await rpc(
+            request,
+            retry=retry,
+            timeout=timeout,
+            metadata=metadata,
+        )
 
         # Done; return the response.
         return response
@@ -1233,12 +1295,20 @@ async def list_hyperparameter_tuning_jobs(
         )
 
         # Send the request.
-        response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,)
+        response = await rpc(
+            request,
+            retry=retry,
+            timeout=timeout,
+            metadata=metadata,
+        )
 
         # This method is paged; wrap the response in a pager, which provides
         # an `__aiter__` convenience method.
         response = pagers.ListHyperparameterTuningJobsAsyncPager(
-            method=rpc, request=request, response=response, metadata=metadata,
+            method=rpc,
+            request=request,
+            response=response,
+            metadata=metadata,
         )
 
         # Done; return the response.
@@ -1328,7 +1398,12 @@ async def delete_hyperparameter_tuning_job(
         )
 
         # Send the request.
-        response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,)
+        response = await rpc(
+            request,
+            retry=retry,
+            timeout=timeout,
+            metadata=metadata,
+        )
 
         # Wrap the response in an operation future.
         response = operation_async.from_gapic(
@@ -1418,7 +1493,10 @@ async def cancel_hyperparameter_tuning_job(
 
         # Send the request.
         await rpc(
-            request, retry=retry, timeout=timeout, metadata=metadata,
+            request,
+            retry=retry,
+            timeout=timeout,
+            metadata=metadata,
         )
 
     async def create_batch_prediction_job(
@@ -1504,7 +1582,12 @@ async def create_batch_prediction_job(
         )
 
         # Send the request.
-        response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,)
+        response = await rpc(
+            request,
+            retry=retry,
+            timeout=timeout,
+            metadata=metadata,
+        )
 
         # Done; return the response.
         return response
@@ -1583,7 +1666,12 @@ async def get_batch_prediction_job(
         )
 
         # Send the request.
-        response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,)
+        response = await rpc(
+            request,
+            retry=retry,
+            timeout=timeout,
+            metadata=metadata,
+        )
 
         # Done; return the response.
         return response
@@ -1659,12 +1747,20 @@ async def list_batch_prediction_jobs(
         )
 
         # Send the request.
-        response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,)
+        response = await rpc(
+            request,
+            retry=retry,
+            timeout=timeout,
+            metadata=metadata,
+        )
 
         # This method is paged; wrap the response in a pager, which provides
         # an `__aiter__` convenience method.
         response = pagers.ListBatchPredictionJobsAsyncPager(
-            method=rpc, request=request, response=response, metadata=metadata,
+            method=rpc,
+            request=request,
+            response=response,
+            metadata=metadata,
        )
 
         # Done; return the response.
@@ -1755,7 +1851,12 @@ async def delete_batch_prediction_job(
         )
 
         # Send the request.
-        response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,)
+        response = await rpc(
+            request,
+            retry=retry,
+            timeout=timeout,
+            metadata=metadata,
+        )
 
         # Wrap the response in an operation future.
         response = operation_async.from_gapic(
@@ -1843,7 +1944,10 @@ async def cancel_batch_prediction_job(
 
         # Send the request.
         await rpc(
-            request, retry=retry, timeout=timeout, metadata=metadata,
+            request,
+            retry=retry,
+            timeout=timeout,
+            metadata=metadata,
         )
 
diff --git a/google/cloud/aiplatform_v1beta1/services/job_service/client.py b/google/cloud/aiplatform_v1beta1/services/job_service/client.py
index a1eb7c38ce..9e73a6bf73 100644
--- a/google/cloud/aiplatform_v1beta1/services/job_service/client.py
+++ b/google/cloud/aiplatform_v1beta1/services/job_service/client.py
@@ -80,7 +80,10 @@ class JobServiceClientMeta(type):
     _transport_registry["grpc"] = JobServiceGrpcTransport
     _transport_registry["grpc_asyncio"] = JobServiceGrpcAsyncIOTransport
 
-    def get_transport_class(cls, label: str = None,) -> Type[JobServiceTransport]:
+    def get_transport_class(
+        cls,
+        label: str = None,
+    ) -> Type[JobServiceTransport]:
         """Return an appropriate transport class.
 
         Args:
@@ -167,7 +170,9 @@ def transport(self) -> JobServiceTransport:
 
     @staticmethod
     def batch_prediction_job_path(
-        project: str, location: str, batch_prediction_job: str,
+        project: str,
+        location: str,
+        batch_prediction_job: str,
     ) -> str:
         """Return a fully-qualified batch_prediction_job string."""
         return "projects/{project}/locations/{location}/batchPredictionJobs/{batch_prediction_job}".format(
@@ -186,10 +191,16 @@ def parse_batch_prediction_job_path(path: str) -> Dict[str, str]:
         return m.groupdict() if m else {}
 
     @staticmethod
-    def custom_job_path(project: str, location: str, custom_job: str,) -> str:
+    def custom_job_path(
+        project: str,
+        location: str,
+        custom_job: str,
+    ) -> str:
         """Return a fully-qualified custom_job string."""
         return "projects/{project}/locations/{location}/customJobs/{custom_job}".format(
-            project=project, location=location, custom_job=custom_job,
+            project=project,
+            location=location,
+            custom_job=custom_job,
         )
 
     @staticmethod
@@ -203,11 +214,15 @@ def parse_custom_job_path(path: str) -> Dict[str, str]:
 
     @staticmethod
     def data_labeling_job_path(
-        project: str, location: str, data_labeling_job: str,
+        project: str,
+        location: str,
+        data_labeling_job: str,
     ) -> str:
         """Return a fully-qualified data_labeling_job string."""
         return "projects/{project}/locations/{location}/dataLabelingJobs/{data_labeling_job}".format(
-            project=project, location=location, data_labeling_job=data_labeling_job,
+            project=project,
+            location=location,
+            data_labeling_job=data_labeling_job,
         )
 
     @staticmethod
@@ -220,10 +235,16 @@ def parse_data_labeling_job_path(path: str) -> Dict[str, str]:
         return m.groupdict() if m else {}
 
     @staticmethod
-    def dataset_path(project: str, location: str, dataset: str,) -> str:
+    def dataset_path(
+        project: str,
+        location: str,
+        dataset: str,
+    ) -> str:
         """Return a fully-qualified dataset string."""
         return "projects/{project}/locations/{location}/datasets/{dataset}".format(
-            project=project, location=location, dataset=dataset,
+            project=project,
+            location=location,
+            dataset=dataset,
         )
 
     @staticmethod
@@ -237,7 +258,9 @@ def parse_dataset_path(path: str) -> Dict[str, str]:
 
     @staticmethod
     def hyperparameter_tuning_job_path(
-        project: str, location: str, hyperparameter_tuning_job: str,
+        project: str,
+        location: str,
+        hyperparameter_tuning_job: str,
     ) -> str:
         """Return a fully-qualified hyperparameter_tuning_job string."""
         return "projects/{project}/locations/{location}/hyperparameterTuningJobs/{hyperparameter_tuning_job}".format(
@@ -256,10 +279,16 @@ def parse_hyperparameter_tuning_job_path(path: str) -> Dict[str, str]:
         return m.groupdict() if m else {}
 
     @staticmethod
-    def model_path(project: str, location: str, model: str,) -> str:
+    def model_path(
+        project: str,
+        location: str,
+        model: str,
+    ) -> str:
         """Return a fully-qualified model string."""
         return "projects/{project}/locations/{location}/models/{model}".format(
-            project=project, location=location, model=model,
+            project=project,
+            location=location,
+            model=model,
         )
 
     @staticmethod
@@ -272,7 +301,9 @@ def parse_model_path(path: str) -> Dict[str, str]:
         return m.groupdict() if m else {}
 
     @staticmethod
-    def common_billing_account_path(billing_account: str,) -> str:
+    def common_billing_account_path(
+        billing_account: str,
+    ) -> str:
         """Return a fully-qualified billing_account string."""
         return "billingAccounts/{billing_account}".format(
             billing_account=billing_account,
@@ -285,9 +316,13 @@ def parse_common_billing_account_path(path: str) -> Dict[str, str]:
         return m.groupdict() if m else {}
 
     @staticmethod
-    def common_folder_path(folder: str,) -> str:
+    def common_folder_path(
+        folder: str,
+    ) -> str:
         """Return a fully-qualified folder string."""
-        return "folders/{folder}".format(folder=folder,)
+        return "folders/{folder}".format(
+            folder=folder,
+        )
 
     @staticmethod
     def parse_common_folder_path(path: str) -> Dict[str, str]:
@@ -296,9 +331,13 @@ def parse_common_folder_path(path: str) -> Dict[str, str]:
         return m.groupdict() if m else {}
 
     @staticmethod
-    def common_organization_path(organization: str,) -> str:
+    def common_organization_path(
+        organization: str,
+    ) -> str:
         """Return a fully-qualified organization string."""
-        return "organizations/{organization}".format(organization=organization,)
+        return "organizations/{organization}".format(
+            organization=organization,
+        )
 
     @staticmethod
     def parse_common_organization_path(path: str) -> Dict[str, str]:
@@ -307,9 +346,13 @@ def parse_common_organization_path(path: str) -> Dict[str, str]:
         return m.groupdict() if m else {}
 
     @staticmethod
-    def common_project_path(project: str,) -> str:
+    def common_project_path(
+        project: str,
+    ) -> str:
         """Return a fully-qualified project string."""
-        return "projects/{project}".format(project=project,)
+        return "projects/{project}".format(
+            project=project,
+        )
 
     @staticmethod
     def parse_common_project_path(path: str) -> Dict[str, str]:
@@ -318,10 +361,14 @@ def parse_common_project_path(path: str) -> Dict[str, str]:
         return m.groupdict() if m else {}
 
     @staticmethod
-    def common_location_path(project: str, location: str,) -> str:
+    def common_location_path(
+        project: str,
+        location: str,
+    ) -> str:
         """Return a fully-qualified location string."""
         return "projects/{project}/locations/{location}".format(
-            project=project, location=location,
+            project=project,
+            location=location,
         )
 
     @staticmethod
@@ -531,7 +578,12 @@ def create_custom_job(
         )
 
         # Send the request.
-        response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,)
+        response = rpc(
+            request,
+            retry=retry,
+            timeout=timeout,
+            metadata=metadata,
+        )
 
         # Done; return the response.
         return response
@@ -610,7 +662,12 @@ def get_custom_job(
         )
 
         # Send the request.
-        response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,)
+        response = rpc(
+            request,
+            retry=retry,
+            timeout=timeout,
+            metadata=metadata,
+        )
 
         # Done; return the response.
         return response
@@ -687,12 +744,20 @@ def list_custom_jobs(
         )
 
         # Send the request.
-        response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,)
+        response = rpc(
+            request,
+            retry=retry,
+            timeout=timeout,
+            metadata=metadata,
+        )
 
         # This method is paged; wrap the response in a pager, which provides
         # an `__iter__` convenience method.
         response = pagers.ListCustomJobsPager(
-            method=rpc, request=request, response=response, metadata=metadata,
+            method=rpc,
+            request=request,
+            response=response,
+            metadata=metadata,
         )
 
         # Done; return the response.
@@ -782,7 +847,12 @@ def delete_custom_job(
         )
 
         # Send the request.
-        response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,)
+        response = rpc(
+            request,
+            retry=retry,
+            timeout=timeout,
+            metadata=metadata,
+        )
 
         # Wrap the response in an operation future.
         response = ga_operation.from_gapic(
@@ -870,7 +940,10 @@ def cancel_custom_job(
 
         # Send the request.
         rpc(
-            request, retry=retry, timeout=timeout, metadata=metadata,
+            request,
+            retry=retry,
+            timeout=timeout,
+            metadata=metadata,
         )
 
     def create_data_labeling_job(
@@ -951,7 +1024,12 @@ def create_data_labeling_job(
         )
 
         # Send the request.
-        response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,)
+        response = rpc(
+            request,
+            retry=retry,
+            timeout=timeout,
+            metadata=metadata,
+        )
 
         # Done; return the response.
         return response
@@ -1026,7 +1104,12 @@ def get_data_labeling_job(
         )
 
         # Send the request.
-        response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,)
+        response = rpc(
+            request,
+            retry=retry,
+            timeout=timeout,
+            metadata=metadata,
+        )
 
         # Done; return the response.
         return response
@@ -1102,12 +1185,20 @@ def list_data_labeling_jobs(
         )
 
         # Send the request.
-        response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,)
+        response = rpc(
+            request,
+            retry=retry,
+            timeout=timeout,
+            metadata=metadata,
+        )
 
         # This method is paged; wrap the response in a pager, which provides
         # an `__iter__` convenience method.
         response = pagers.ListDataLabelingJobsPager(
-            method=rpc, request=request, response=response, metadata=metadata,
+            method=rpc,
+            request=request,
+            response=response,
+            metadata=metadata,
         )
 
         # Done; return the response.
@@ -1198,7 +1289,12 @@ def delete_data_labeling_job(
         )
 
         # Send the request.
-        response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,)
+        response = rpc(
+            request,
+            retry=retry,
+            timeout=timeout,
+            metadata=metadata,
+        )
 
         # Wrap the response in an operation future.
         response = ga_operation.from_gapic(
@@ -1276,7 +1372,10 @@ def cancel_data_labeling_job(
 
         # Send the request.
         rpc(
-            request, retry=retry, timeout=timeout, metadata=metadata,
+            request,
+            retry=retry,
+            timeout=timeout,
+            metadata=metadata,
         )
 
     def create_hyperparameter_tuning_job(
@@ -1361,7 +1460,12 @@ def create_hyperparameter_tuning_job(
         )
 
         # Send the request.
-        response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,)
+        response = rpc(
+            request,
+            retry=retry,
+            timeout=timeout,
+            metadata=metadata,
+        )
 
         # Done; return the response.
         return response
@@ -1440,7 +1544,12 @@ def get_hyperparameter_tuning_job(
         )
 
         # Send the request.
-        response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,)
+        response = rpc(
+            request,
+            retry=retry,
+            timeout=timeout,
+            metadata=metadata,
+        )
 
         # Done; return the response.
         return response
@@ -1519,12 +1628,20 @@ def list_hyperparameter_tuning_jobs(
         )
 
         # Send the request.
-        response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,)
+        response = rpc(
+            request,
+            retry=retry,
+            timeout=timeout,
+            metadata=metadata,
+        )
 
         # This method is paged; wrap the response in a pager, which provides
         # an `__iter__` convenience method.
         response = pagers.ListHyperparameterTuningJobsPager(
-            method=rpc, request=request, response=response, metadata=metadata,
+            method=rpc,
+            request=request,
+            response=response,
+            metadata=metadata,
         )
 
         # Done; return the response.
@@ -1617,7 +1734,12 @@ def delete_hyperparameter_tuning_job(
         )
 
         # Send the request.
-        response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,)
+        response = rpc(
+            request,
+            retry=retry,
+            timeout=timeout,
+            metadata=metadata,
+        )
 
         # Wrap the response in an operation future.
         response = ga_operation.from_gapic(
@@ -1710,7 +1832,10 @@ def cancel_hyperparameter_tuning_job(
 
         # Send the request.
         rpc(
-            request, retry=retry, timeout=timeout, metadata=metadata,
+            request,
+            retry=retry,
+            timeout=timeout,
+            metadata=metadata,
         )
 
     def create_batch_prediction_job(
@@ -1799,7 +1924,12 @@ def create_batch_prediction_job(
         )
 
         # Send the request.
-        response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,)
+        response = rpc(
+            request,
+            retry=retry,
+            timeout=timeout,
+            metadata=metadata,
+        )
 
         # Done; return the response.
         return response
@@ -1879,7 +2009,12 @@ def get_batch_prediction_job(
         )
 
         # Send the request.
-        response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,)
+        response = rpc(
+            request,
+            retry=retry,
+            timeout=timeout,
+            metadata=metadata,
+        )
 
         # Done; return the response.
         return response
@@ -1958,12 +2093,20 @@ def list_batch_prediction_jobs(
         )
 
         # Send the request.
-        response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,)
+        response = rpc(
+            request,
+            retry=retry,
+            timeout=timeout,
+            metadata=metadata,
+        )
 
         # This method is paged; wrap the response in a pager, which provides
         # an `__iter__` convenience method.
         response = pagers.ListBatchPredictionJobsPager(
-            method=rpc, request=request, response=response, metadata=metadata,
+            method=rpc,
+            request=request,
+            response=response,
+            metadata=metadata,
         )
 
         # Done; return the response.
@@ -2057,7 +2200,12 @@ def delete_batch_prediction_job(
         )
 
         # Send the request.
-        response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,)
+        response = rpc(
+            request,
+            retry=retry,
+            timeout=timeout,
+            metadata=metadata,
+        )
 
         # Wrap the response in an operation future.
         response = ga_operation.from_gapic(
@@ -2148,7 +2296,10 @@ def cancel_batch_prediction_job(
 
         # Send the request.
         rpc(
-            request, retry=retry, timeout=timeout, metadata=metadata,
+            request,
+            retry=retry,
+            timeout=timeout,
+            metadata=metadata,
         )
 
diff --git a/google/cloud/aiplatform_v1beta1/services/job_service/transports/grpc.py b/google/cloud/aiplatform_v1beta1/services/job_service/transports/grpc.py
index f4b610bd53..859efdd7e7 100644
--- a/google/cloud/aiplatform_v1beta1/services/job_service/transports/grpc.py
+++ b/google/cloud/aiplatform_v1beta1/services/job_service/transports/grpc.py
@@ -242,8 +242,7 @@ def create_channel(
 
     @property
     def grpc_channel(self) -> grpc.Channel:
-        """Return the channel designed to connect to this service.
-        """
+        """Return the channel designed to connect to this service."""
         return self._grpc_channel
 
     @property
diff --git a/google/cloud/aiplatform_v1beta1/services/migration_service/async_client.py b/google/cloud/aiplatform_v1beta1/services/migration_service/async_client.py
index af13c4d4fb..0f2348ac38 100644
--- a/google/cloud/aiplatform_v1beta1/services/migration_service/async_client.py
+++ b/google/cloud/aiplatform_v1beta1/services/migration_service/async_client.py
@@ -236,12 +236,20 @@ async def search_migratable_resources(
         )
 
         # Send the request.
-        response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,)
+        response = await rpc(
+            request,
+            retry=retry,
+            timeout=timeout,
+            metadata=metadata,
+        )
 
         # This method is paged; wrap the response in a pager, which provides
         # an `__aiter__` convenience method.
         response = pagers.SearchMigratableResourcesAsyncPager(
-            method=rpc, request=request, response=response, metadata=metadata,
+            method=rpc,
+            request=request,
+            response=response,
+            metadata=metadata,
         )
 
         # Done; return the response.
@@ -336,7 +344,12 @@ async def batch_migrate_resources(
         )
 
         # Send the request.
-        response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,)
+        response = await rpc(
+            request,
+            retry=retry,
+            timeout=timeout,
+            metadata=metadata,
+        )
 
         # Wrap the response in an operation future.
         response = operation_async.from_gapic(
diff --git a/google/cloud/aiplatform_v1beta1/services/migration_service/client.py b/google/cloud/aiplatform_v1beta1/services/migration_service/client.py
index bf1f8e5c6b..116a987f86 100644
--- a/google/cloud/aiplatform_v1beta1/services/migration_service/client.py
+++ b/google/cloud/aiplatform_v1beta1/services/migration_service/client.py
@@ -57,7 +57,10 @@ class MigrationServiceClientMeta(type):
     _transport_registry["grpc"] = MigrationServiceGrpcTransport
     _transport_registry["grpc_asyncio"] = MigrationServiceGrpcAsyncIOTransport
 
-    def get_transport_class(cls, label: str = None,) -> Type[MigrationServiceTransport]:
+    def get_transport_class(
+        cls,
+        label: str = None,
+    ) -> Type[MigrationServiceTransport]:
         """Return an appropriate transport class.
 
         Args:
@@ -147,11 +150,15 @@ def transport(self) -> MigrationServiceTransport:
 
     @staticmethod
     def annotated_dataset_path(
-        project: str, dataset: str, annotated_dataset: str,
+        project: str,
+        dataset: str,
+        annotated_dataset: str,
     ) -> str:
         """Return a fully-qualified annotated_dataset string."""
         return "projects/{project}/datasets/{dataset}/annotatedDatasets/{annotated_dataset}".format(
-            project=project, dataset=dataset, annotated_dataset=annotated_dataset,
+            project=project,
+            dataset=dataset,
+            annotated_dataset=annotated_dataset,
         )
 
     @staticmethod
@@ -164,10 +171,16 @@ def parse_annotated_dataset_path(path: str) -> Dict[str, str]:
         return m.groupdict() if m else {}
 
     @staticmethod
-    def dataset_path(project: str, location: str, dataset: str,) -> str:
+    def dataset_path(
+        project: str,
+        location: str,
+        dataset: str,
+    ) -> str:
         """Return a fully-qualified dataset string."""
         return "projects/{project}/locations/{location}/datasets/{dataset}".format(
-            project=project, location=location, dataset=dataset,
+            project=project,
+            location=location,
+            dataset=dataset,
         )
 
     @staticmethod
@@ -180,10 +193,14 @@ def parse_dataset_path(path: str) -> Dict[str, str]:
         return m.groupdict() if m else {}
 
     @staticmethod
-    def dataset_path(project: str, dataset: str,) -> str:
+    def dataset_path(
+        project: str,
+        dataset: str,
+    ) -> str:
         """Return a fully-qualified dataset string."""
         return "projects/{project}/datasets/{dataset}".format(
-            project=project, dataset=dataset,
+            project=project,
+            dataset=dataset,
         )
 
     @staticmethod
@@ -193,10 +210,16 @@ def parse_dataset_path(path: str) -> Dict[str, str]:
         return m.groupdict() if m else {}
 
     @staticmethod
-    def dataset_path(project: str, location: str, dataset: str,) -> str:
+    def dataset_path(
+        project: str,
+        location: str,
+        dataset: str,
+    ) -> str:
         """Return a fully-qualified dataset string."""
         return "projects/{project}/locations/{location}/datasets/{dataset}".format(
-            project=project, location=location, dataset=dataset,
+            project=project,
+            location=location,
+            dataset=dataset,
         )
 
     @staticmethod
@@ -209,10 +232,16 @@ def parse_dataset_path(path: str) -> Dict[str, str]:
         return m.groupdict() if m else {}
 
     @staticmethod
-    def model_path(project: str, location: str, model: str,) -> str:
+    def model_path(
+        project: str,
+        location: str,
+        model: str,
+    ) -> str:
         """Return a fully-qualified model string."""
         return "projects/{project}/locations/{location}/models/{model}".format(
-            project=project, location=location, model=model,
+            project=project,
+            location=location,
+            model=model,
         )
 
     @staticmethod
@@ -225,10 +254,16 @@ def parse_model_path(path: str) -> Dict[str, str]:
         return m.groupdict() if m else {}
 
     @staticmethod
-    def model_path(project: str, location: str, model: str,) -> str:
+    def model_path(
+        project: str,
+        location: str,
+        model: str,
+    ) -> str:
         """Return a fully-qualified model string."""
         return "projects/{project}/locations/{location}/models/{model}".format(
-            project=project, location=location, model=model,
+            project=project,
+            location=location,
+            model=model,
         )
 
     @staticmethod
@@ -241,10 +276,16 @@ def parse_model_path(path: str) -> Dict[str, str]:
         return m.groupdict() if m else {}
 
     @staticmethod
-    def version_path(project: str, model: str, version: str,) -> str:
+    def version_path(
+        project: str,
+        model: str,
+        version: str,
+    ) -> str:
         """Return a fully-qualified version string."""
         return "projects/{project}/models/{model}/versions/{version}".format(
-            project=project, model=model, version=version,
+            project=project,
+            model=model,
+            version=version,
         )
 
     @staticmethod
@@ -257,7 +298,9 @@ def parse_version_path(path: str) -> Dict[str, str]:
         return m.groupdict() if m else {}
 
     @staticmethod
-    def common_billing_account_path(billing_account: str,) -> str:
+    def common_billing_account_path(
+        billing_account: str,
+    ) -> str:
         """Return a fully-qualified billing_account string."""
         return "billingAccounts/{billing_account}".format(
             billing_account=billing_account,
@@ -270,9 +313,13 @@ def parse_common_billing_account_path(path: str) -> Dict[str, str]:
         return m.groupdict() if m else {}
 
     @staticmethod
-    def common_folder_path(folder: str,) -> str:
+    def common_folder_path(
+        folder: str,
+    ) -> str:
         """Return a fully-qualified folder string."""
-        return "folders/{folder}".format(folder=folder,)
+        return "folders/{folder}".format(
+            folder=folder,
+        )
 
     @staticmethod
     def parse_common_folder_path(path: str) -> Dict[str, str]:
@@ -281,9 +328,13 @@ def parse_common_folder_path(path: str) -> Dict[str, str]:
         return m.groupdict() if m else {}
 
     @staticmethod
-    def common_organization_path(organization: str,) -> str:
+    def common_organization_path(
+        organization: str,
+    ) -> str:
         """Return a fully-qualified organization string."""
-        return "organizations/{organization}".format(organization=organization,)
+        return "organizations/{organization}".format(
+            organization=organization,
+        )
 
     @staticmethod
     def parse_common_organization_path(path: str) -> Dict[str, str]:
@@ -292,9 +343,13 @@ def parse_common_organization_path(path: str) -> Dict[str, str]:
         return m.groupdict() if m else {}
 
     @staticmethod
-    def common_project_path(project: str,) -> str:
+    def common_project_path(
+        project: str,
+    ) -> str:
         """Return a fully-qualified project string."""
-        return "projects/{project}".format(project=project,)
+        return "projects/{project}".format(
+            project=project,
+        )
 
     @staticmethod
     def parse_common_project_path(path: str) -> Dict[str, str]:
@@ -303,10 +358,14 @@ def parse_common_project_path(path: str) -> Dict[str, str]:
         return m.groupdict() if m else {}
 
     @staticmethod
-    def common_location_path(project: str, location: str,) -> str:
+    def common_location_path(
+        project: str,
+        location: str,
+    ) -> str:
         """Return a fully-qualified location string."""
         return "projects/{project}/locations/{location}".format(
-            project=project, location=location,
+            project=project,
+            location=location,
         )
 
     @staticmethod
@@ -511,12 +570,20 @@ def search_migratable_resources(
         )
 
         # Send the request.
-        response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,)
+        response = rpc(
+            request,
+            retry=retry,
+            timeout=timeout,
+            metadata=metadata,
+        )
 
         # This method is paged; wrap the response in a pager, which provides
         # an `__iter__` convenience method.
         response = pagers.SearchMigratableResourcesPager(
-            method=rpc, request=request, response=response, metadata=metadata,
+            method=rpc,
+            request=request,
+            response=response,
+            metadata=metadata,
         )
 
         # Done; return the response.
@@ -612,7 +679,12 @@ def batch_migrate_resources(
         )
 
         # Send the request.
-        response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,)
+        response = rpc(
+            request,
+            retry=retry,
+            timeout=timeout,
+            metadata=metadata,
+        )
 
         # Wrap the response in an operation future.
         response = operation.from_gapic(
diff --git a/google/cloud/aiplatform_v1beta1/services/migration_service/transports/grpc.py b/google/cloud/aiplatform_v1beta1/services/migration_service/transports/grpc.py
index efd4c4b6a4..b73e3936d5 100644
--- a/google/cloud/aiplatform_v1beta1/services/migration_service/transports/grpc.py
+++ b/google/cloud/aiplatform_v1beta1/services/migration_service/transports/grpc.py
@@ -229,8 +229,7 @@ def create_channel(
 
     @property
     def grpc_channel(self) -> grpc.Channel:
-        """Return the channel designed to connect to this service.
-        """
+        """Return the channel designed to connect to this service."""
         return self._grpc_channel
 
     @property
diff --git a/google/cloud/aiplatform_v1beta1/services/model_service/async_client.py b/google/cloud/aiplatform_v1beta1/services/model_service/async_client.py
index 3b27b6e184..631671e269 100644
--- a/google/cloud/aiplatform_v1beta1/services/model_service/async_client.py
+++ b/google/cloud/aiplatform_v1beta1/services/model_service/async_client.py
@@ -246,7 +246,12 @@ async def upload_model(
         )
 
         # Send the request.
-        response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,)
+        response = await rpc(
+            request,
+            retry=retry,
+            timeout=timeout,
+            metadata=metadata,
+        )
 
         # Wrap the response in an operation future.
         response = operation_async.from_gapic(
@@ -324,7 +329,12 @@ async def get_model(
         )
 
         # Send the request.
-        response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,)
+        response = await rpc(
+            request,
+            retry=retry,
+            timeout=timeout,
+            metadata=metadata,
+        )
 
         # Done; return the response.
         return response
@@ -400,12 +410,20 @@ async def list_models(
         )
 
         # Send the request.
-        response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,)
+        response = await rpc(
+            request,
+            retry=retry,
+            timeout=timeout,
+            metadata=metadata,
+        )
 
         # This method is paged; wrap the response in a pager, which provides
         # an `__aiter__` convenience method.
         response = pagers.ListModelsAsyncPager(
-            method=rpc, request=request, response=response, metadata=metadata,
+            method=rpc,
+            request=request,
+            response=response,
+            metadata=metadata,
         )
 
         # Done; return the response.
@@ -489,7 +507,12 @@ async def update_model(
         )
 
         # Send the request.
-        response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,)
+        response = await rpc(
+            request,
+            retry=retry,
+            timeout=timeout,
+            metadata=metadata,
+        )
 
         # Done; return the response.
         return response
@@ -579,7 +602,12 @@ async def delete_model(
         )
 
         # Send the request.
-        response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,)
+        response = await rpc(
+            request,
+            retry=retry,
+            timeout=timeout,
+            metadata=metadata,
+        )
 
         # Wrap the response in an operation future.
         response = operation_async.from_gapic(
@@ -677,7 +705,12 @@ async def export_model(
         )
 
         # Send the request.
-        response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,)
+        response = await rpc(
+            request,
+            retry=retry,
+            timeout=timeout,
+            metadata=metadata,
+        )
 
         # Wrap the response in an operation future.
         response = operation_async.from_gapic(
@@ -761,7 +794,12 @@ async def get_model_evaluation(
         )
 
         # Send the request.
-        response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,)
+        response = await rpc(
+            request,
+            retry=retry,
+            timeout=timeout,
+            metadata=metadata,
+        )
 
         # Done; return the response.
         return response
@@ -837,12 +875,20 @@ async def list_model_evaluations(
         )
 
         # Send the request.
-        response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,)
+        response = await rpc(
+            request,
+            retry=retry,
+            timeout=timeout,
+            metadata=metadata,
+        )
 
         # This method is paged; wrap the response in a pager, which provides
         # an `__aiter__` convenience method.
         response = pagers.ListModelEvaluationsAsyncPager(
-            method=rpc, request=request, response=response, metadata=metadata,
+            method=rpc,
+            request=request,
+            response=response,
+            metadata=metadata,
         )
 
         # Done; return the response.
@@ -919,7 +965,12 @@ async def get_model_evaluation_slice(
         )
 
         # Send the request.
-        response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,)
+        response = await rpc(
+            request,
+            retry=retry,
+            timeout=timeout,
+            metadata=metadata,
+        )
 
         # Done; return the response.
         return response
@@ -996,12 +1047,20 @@ async def list_model_evaluation_slices(
         )
 
         # Send the request.
-        response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,)
+        response = await rpc(
+            request,
+            retry=retry,
+            timeout=timeout,
+            metadata=metadata,
+        )
 
         # This method is paged; wrap the response in a pager, which provides
         # an `__aiter__` convenience method.
         response = pagers.ListModelEvaluationSlicesAsyncPager(
-            method=rpc, request=request, response=response, metadata=metadata,
+            method=rpc,
+            request=request,
+            response=response,
+            metadata=metadata,
         )
 
         # Done; return the response.
diff --git a/google/cloud/aiplatform_v1beta1/services/model_service/client.py b/google/cloud/aiplatform_v1beta1/services/model_service/client.py
index 30c00c0c9d..423a86bb70 100644
--- a/google/cloud/aiplatform_v1beta1/services/model_service/client.py
+++ b/google/cloud/aiplatform_v1beta1/services/model_service/client.py
@@ -65,7 +65,10 @@ class ModelServiceClientMeta(type):
     _transport_registry["grpc"] = ModelServiceGrpcTransport
     _transport_registry["grpc_asyncio"] = ModelServiceGrpcAsyncIOTransport
 
-    def get_transport_class(cls, label: str = None,) -> Type[ModelServiceTransport]:
+    def get_transport_class(
+        cls,
+        label: str = None,
+    ) -> Type[ModelServiceTransport]:
         """Return an appropriate transport class.
 
         Args:
@@ -151,10 +154,16 @@ def transport(self) -> ModelServiceTransport:
         return self._transport
 
     @staticmethod
-    def endpoint_path(project: str, location: str, endpoint: str,) -> str:
+    def endpoint_path(
+        project: str,
+        location: str,
+        endpoint: str,
+    ) -> str:
         """Return a fully-qualified endpoint string."""
         return "projects/{project}/locations/{location}/endpoints/{endpoint}".format(
-            project=project, location=location, endpoint=endpoint,
+            project=project,
+            location=location,
+            endpoint=endpoint,
         )
 
     @staticmethod
@@ -167,10 +176,16 @@ def parse_endpoint_path(path: str) -> Dict[str, str]:
         return m.groupdict() if m else {}
 
     @staticmethod
-    def model_path(project: str, location: str, model: str,) -> str:
+    def model_path(
+        project: str,
+        location: str,
+        model: str,
+    ) -> str:
         """Return a fully-qualified model string."""
         return "projects/{project}/locations/{location}/models/{model}".format(
-            project=project, location=location, model=model,
+            project=project,
+            location=location,
+            model=model,
         )
 
     @staticmethod
@@ -184,11 +199,17 @@ def parse_model_path(path: str) -> Dict[str, str]:
 
     @staticmethod
     def model_evaluation_path(
-        project: str, location: str, model: str, evaluation: str,
+        project: str,
+        location: str,
+        model: str,
+        evaluation: str,
     ) -> str:
         """Return a fully-qualified model_evaluation string."""
         return "projects/{project}/locations/{location}/models/{model}/evaluations/{evaluation}".format(
-            project=project, location=location, model=model, evaluation=evaluation,
+            project=project,
+            location=location,
+            model=model,
+            evaluation=evaluation,
         )
 
     @staticmethod
@@ -202,7 +223,11 @@ def parse_model_evaluation_path(path: str) -> Dict[str, str]:
 
     @staticmethod
     def model_evaluation_slice_path(
-        project: str, location: str, model: str, evaluation: str, slice: str,
+        project: str,
+        location: str,
+        model: str,
+        evaluation: str,
+        slice: str,
     ) -> str:
         """Return a fully-qualified model_evaluation_slice string."""
         return "projects/{project}/locations/{location}/models/{model}/evaluations/{evaluation}/slices/{slice}".format(
@@ -224,11 +249,15 @@ def parse_model_evaluation_slice_path(path: str) -> Dict[str, str]:
 
     @staticmethod
     def training_pipeline_path(
-        project: str, location: str, training_pipeline: str,
+        project: str,
+        location: str,
+        training_pipeline: str,
     ) -> str:
         """Return a fully-qualified training_pipeline string."""
         return "projects/{project}/locations/{location}/trainingPipelines/{training_pipeline}".format(
-            project=project, location=location, training_pipeline=training_pipeline,
+            project=project,
+            location=location,
+            training_pipeline=training_pipeline,
         )
 
     @staticmethod
@@ -241,7 +270,9 @@ def parse_training_pipeline_path(path: str) -> Dict[str, str]:
         return m.groupdict() if m else {}
 
     @staticmethod
-    def common_billing_account_path(billing_account: str,) -> str:
+    def common_billing_account_path(
+        billing_account: str,
+    ) -> str:
         """Return a fully-qualified billing_account string."""
         return "billingAccounts/{billing_account}".format(
             billing_account=billing_account,
@@ -254,9 +285,13 @@ def parse_common_billing_account_path(path: str) -> Dict[str, str]:
         return m.groupdict() if m else {}
 
     @staticmethod
-    def common_folder_path(folder: str,) -> str:
+    def common_folder_path(
+        folder: str,
+    ) -> str:
         """Return a fully-qualified folder string."""
-        return "folders/{folder}".format(folder=folder,)
+        return "folders/{folder}".format(
+            folder=folder,
+        )
 
     @staticmethod
     def parse_common_folder_path(path: str) -> Dict[str, str]:
@@ -265,9 +300,13 @@ def parse_common_folder_path(path: str) -> Dict[str, str]:
         return m.groupdict() if m else {}
 
     @staticmethod
-    def common_organization_path(organization: str,) -> str:
+    def common_organization_path(
+        organization: str,
+    ) -> str:
         """Return a fully-qualified organization string."""
-        return "organizations/{organization}".format(organization=organization,)
+        return "organizations/{organization}".format(
+            organization=organization,
+        )
 
     @staticmethod
     def parse_common_organization_path(path: str) -> Dict[str, str]:
@@ -276,9 +315,13 @@ def parse_common_organization_path(path: str) -> Dict[str, str]:
         return m.groupdict() if m else {}
 
     @staticmethod
-    def common_project_path(project: str,) -> str:
+    def common_project_path(
+        project: str,
+    ) -> str:
         """Return a fully-qualified project string."""
-        return "projects/{project}".format(project=project,)
+        return "projects/{project}".format(
+            project=project,
+        )
 
     @staticmethod
     def parse_common_project_path(path: str) -> Dict[str, str]:
@@ -287,10 +330,14 @@ def parse_common_project_path(path: str) -> Dict[str, str]:
         return m.groupdict() if m else {}
 
     @staticmethod
-    def common_location_path(project: str, location: str,) -> str:
+    def common_location_path(
+        project: str,
+        location: str,
+    ) -> str:
         """Return a fully-qualified location string."""
         return "projects/{project}/locations/{location}".format(
-            project=project, location=location,
+            project=project,
+            location=location,
         )
 
     @staticmethod
@@ -498,7 +545,12 @@ def upload_model(
         )
 
         # Send the request.
-        response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,)
+        response = rpc(
+            request,
+            retry=retry,
+            timeout=timeout,
+            metadata=metadata,
+        )
 
         # Wrap the response in an operation future.
         response = ga_operation.from_gapic(
@@ -577,7 +629,12 @@ def get_model(
         )
 
         # Send the request.
-        response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,)
+        response = rpc(
+            request,
+            retry=retry,
+            timeout=timeout,
+            metadata=metadata,
+        )
 
         # Done; return the response.
         return response
@@ -654,12 +711,20 @@ def list_models(
         )
 
         # Send the request.
-        response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,)
+        response = rpc(
+            request,
+            retry=retry,
+            timeout=timeout,
+            metadata=metadata,
+        )
 
         # This method is paged; wrap the response in a pager, which provides
         # an `__iter__` convenience method.
         response = pagers.ListModelsPager(
-            method=rpc, request=request, response=response, metadata=metadata,
+            method=rpc,
+            request=request,
+            response=response,
+            metadata=metadata,
         )
 
         # Done; return the response.
@@ -744,7 +809,12 @@ def update_model(
         )
 
         # Send the request.
-        response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,)
+        response = rpc(
+            request,
+            retry=retry,
+            timeout=timeout,
+            metadata=metadata,
+        )
 
         # Done; return the response.
         return response
@@ -835,7 +905,12 @@ def delete_model(
         )
 
         # Send the request.
-        response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,)
+        response = rpc(
+            request,
+            retry=retry,
+            timeout=timeout,
+            metadata=metadata,
+        )
 
         # Wrap the response in an operation future.
         response = ga_operation.from_gapic(
@@ -934,7 +1009,12 @@ def export_model(
         )
 
         # Send the request.
-        response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,)
+        response = rpc(
+            request,
+            retry=retry,
+            timeout=timeout,
+            metadata=metadata,
+        )
 
         # Wrap the response in an operation future.
         response = ga_operation.from_gapic(
@@ -1019,7 +1099,12 @@ def get_model_evaluation(
         )
 
         # Send the request.
-        response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,)
+        response = rpc(
+            request,
+            retry=retry,
+            timeout=timeout,
+            metadata=metadata,
+        )
 
         # Done; return the response.
         return response
@@ -1096,12 +1181,20 @@ def list_model_evaluations(
         )
 
         # Send the request.
-        response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,)
+        response = rpc(
+            request,
+            retry=retry,
+            timeout=timeout,
+            metadata=metadata,
+        )
 
         # This method is paged; wrap the response in a pager, which provides
         # an `__iter__` convenience method.
         response = pagers.ListModelEvaluationsPager(
-            method=rpc, request=request, response=response, metadata=metadata,
+            method=rpc,
+            request=request,
+            response=response,
+            metadata=metadata,
         )
 
         # Done; return the response.
@@ -1181,7 +1274,12 @@ def get_model_evaluation_slice(
         )
 
         # Send the request.
-        response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,)
+        response = rpc(
+            request,
+            retry=retry,
+            timeout=timeout,
+            metadata=metadata,
+        )
 
         # Done; return the response.
         return response
@@ -1261,12 +1359,20 @@ def list_model_evaluation_slices(
         )
 
         # Send the request.
-        response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,)
+        response = rpc(
+            request,
+            retry=retry,
+            timeout=timeout,
+            metadata=metadata,
+        )
 
         # This method is paged; wrap the response in a pager, which provides
         # an `__iter__` convenience method.
         response = pagers.ListModelEvaluationSlicesPager(
-            method=rpc, request=request, response=response, metadata=metadata,
+            method=rpc,
+            request=request,
+            response=response,
+            metadata=metadata,
         )
 
         # Done; return the response.
diff --git a/google/cloud/aiplatform_v1beta1/services/model_service/transports/grpc.py b/google/cloud/aiplatform_v1beta1/services/model_service/transports/grpc.py
index 442b665d3a..98f90e9dc8 100644
--- a/google/cloud/aiplatform_v1beta1/services/model_service/transports/grpc.py
+++ b/google/cloud/aiplatform_v1beta1/services/model_service/transports/grpc.py
@@ -231,8 +231,7 @@ def create_channel(
 
     @property
     def grpc_channel(self) -> grpc.Channel:
-        """Return the channel designed to connect to this service.
-        """
+        """Return the channel designed to connect to this service."""
         return self._grpc_channel
 
     @property
diff --git a/google/cloud/aiplatform_v1beta1/services/pipeline_service/async_client.py b/google/cloud/aiplatform_v1beta1/services/pipeline_service/async_client.py
index ef420aae0b..9b3f2f7fa7 100644
--- a/google/cloud/aiplatform_v1beta1/services/pipeline_service/async_client.py
+++ b/google/cloud/aiplatform_v1beta1/services/pipeline_service/async_client.py
@@ -241,7 +241,12 @@ async def create_training_pipeline(
         )
 
         # Send the request.
-        response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,)
+        response = await rpc(
+            request,
+            retry=retry,
+            timeout=timeout,
+            metadata=metadata,
+        )
 
         # Done; return the response.
         return response
@@ -319,7 +324,12 @@ async def get_training_pipeline(
         )
 
         # Send the request.
-        response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,)
+        response = await rpc(
+            request,
+            retry=retry,
+            timeout=timeout,
+            metadata=metadata,
+        )
 
         # Done; return the response.
         return response
@@ -395,12 +405,20 @@ async def list_training_pipelines(
         )
 
         # Send the request.
-        response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,)
+        response = await rpc(
+            request,
+            retry=retry,
+            timeout=timeout,
+            metadata=metadata,
+        )
 
         # This method is paged; wrap the response in a pager, which provides
         # an `__aiter__` convenience method.
         response = pagers.ListTrainingPipelinesAsyncPager(
-            method=rpc, request=request, response=response, metadata=metadata,
+            method=rpc,
+            request=request,
+            response=response,
+            metadata=metadata,
         )
 
         # Done; return the response.
@@ -490,7 +508,12 @@ async def delete_training_pipeline(
         )
 
         # Send the request.
-        response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,)
+        response = await rpc(
+            request,
+            retry=retry,
+            timeout=timeout,
+            metadata=metadata,
+        )
 
         # Wrap the response in an operation future.
         response = operation_async.from_gapic(
@@ -579,7 +602,10 @@ async def cancel_training_pipeline(
 
         # Send the request.
         await rpc(
-            request, retry=retry, timeout=timeout, metadata=metadata,
+            request,
+            retry=retry,
+            timeout=timeout,
+            metadata=metadata,
         )
 
diff --git a/google/cloud/aiplatform_v1beta1/services/pipeline_service/client.py b/google/cloud/aiplatform_v1beta1/services/pipeline_service/client.py
index e3e7d6aeda..73c79cc90d 100644
--- a/google/cloud/aiplatform_v1beta1/services/pipeline_service/client.py
+++ b/google/cloud/aiplatform_v1beta1/services/pipeline_service/client.py
@@ -67,7 +67,10 @@ class PipelineServiceClientMeta(type):
     _transport_registry["grpc"] = PipelineServiceGrpcTransport
     _transport_registry["grpc_asyncio"] = PipelineServiceGrpcAsyncIOTransport
 
-    def get_transport_class(cls, label: str = None,) -> Type[PipelineServiceTransport]:
+    def get_transport_class(
+        cls,
+        label: str = None,
+    ) -> Type[PipelineServiceTransport]:
         """Return an appropriate transport class.
 
         Args:
@@ -153,10 +156,16 @@ def transport(self) -> PipelineServiceTransport:
         return self._transport
 
     @staticmethod
-    def endpoint_path(project: str, location: str, endpoint: str,) -> str:
+    def endpoint_path(
+        project: str,
+        location: str,
+        endpoint: str,
+    ) -> str:
         """Return a fully-qualified endpoint string."""
         return "projects/{project}/locations/{location}/endpoints/{endpoint}".format(
-            project=project, location=location, endpoint=endpoint,
+            project=project,
+            location=location,
+            endpoint=endpoint,
        )
 
     @staticmethod
@@ -169,10 +178,16 @@ def parse_endpoint_path(path: str) -> Dict[str, str]:
         return m.groupdict() if m else {}
 
     @staticmethod
-    def model_path(project: str, location: str, model: str,) -> str:
+    def model_path(
+        project: str,
+        location: str,
+        model: str,
+    ) -> str:
         """Return a fully-qualified model string."""
         return "projects/{project}/locations/{location}/models/{model}".format(
-            project=project, location=location, model=model,
+            project=project,
+            location=location,
+            model=model,
         )
 
     @staticmethod
@@ -186,11 +201,15 @@ def parse_model_path(path: str) -> Dict[str, str]:
 
     @staticmethod
     def training_pipeline_path(
-        project: str, location: str, training_pipeline: str,
+        project: str,
+        location: str,
+        training_pipeline: str,
     ) -> str:
         """Return a fully-qualified training_pipeline string."""
         return "projects/{project}/locations/{location}/trainingPipelines/{training_pipeline}".format(
-            project=project, location=location, training_pipeline=training_pipeline,
+            project=project,
+            location=location,
+            training_pipeline=training_pipeline,
         )
 
     @staticmethod
@@ -203,7 +222,9 @@ def parse_training_pipeline_path(path: str) -> Dict[str, str]:
         return m.groupdict() if m else {}
 
     @staticmethod
-    def common_billing_account_path(billing_account: str,) -> str:
+    def common_billing_account_path(
+        billing_account: str,
+    ) -> str:
         """Return a fully-qualified billing_account string."""
         return "billingAccounts/{billing_account}".format(
             billing_account=billing_account,
@@ -216,9 +237,13 @@ def parse_common_billing_account_path(path: str) -> Dict[str, str]:
         return m.groupdict() if m else {}
 
     @staticmethod
-    def common_folder_path(folder: str,) -> str:
+    def common_folder_path(
+        folder: str,
+    ) -> str:
         """Return a fully-qualified folder string."""
-        return "folders/{folder}".format(folder=folder,)
+        return "folders/{folder}".format(
+            folder=folder,
+        )
 
     @staticmethod
     def parse_common_folder_path(path: str) -> Dict[str, str]:
@@ -227,9 +252,13 @@ def parse_common_folder_path(path: str) -> Dict[str, str]:
         return m.groupdict() if m else {}
 
     @staticmethod
-    def common_organization_path(organization: str,) -> str:
+    def common_organization_path(
+        organization: str,
+    ) -> str:
         """Return a fully-qualified organization string."""
-        return "organizations/{organization}".format(organization=organization,)
+        return "organizations/{organization}".format(
+            organization=organization,
+        )
 
     @staticmethod
     def parse_common_organization_path(path: str) -> Dict[str, str]:
@@ -238,9 +267,13 @@ def parse_common_organization_path(path: str) -> Dict[str, str]:
         return m.groupdict() if m else {}
 
     @staticmethod
-    def common_project_path(project: str,) -> str:
+    def common_project_path(
+        project: str,
+    ) -> str:
         """Return a fully-qualified project string."""
-        return "projects/{project}".format(project=project,)
+        return "projects/{project}".format(
+            project=project,
+        )
 
     @staticmethod
     def parse_common_project_path(path: str) -> Dict[str, str]:
@@ -249,10 +282,14 @@ def parse_common_project_path(path: str) -> Dict[str, str]:
Dict[str, str]: return m.groupdict() if m else {} @staticmethod - def common_location_path(project: str, location: str,) -> str: + def common_location_path( + project: str, + location: str, + ) -> str: """Return a fully-qualified location string.""" return "projects/{project}/locations/{location}".format( - project=project, location=location, + project=project, + location=location, ) @staticmethod @@ -461,7 +498,12 @@ def create_training_pipeline( ) # Send the request. - response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) # Done; return the response. return response @@ -540,7 +582,12 @@ def get_training_pipeline( ) # Send the request. - response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) # Done; return the response. return response @@ -617,12 +664,20 @@ def list_training_pipelines( ) # Send the request. - response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) # This method is paged; wrap the response in a pager, which provides # an `__iter__` convenience method. response = pagers.ListTrainingPipelinesPager( - method=rpc, request=request, response=response, metadata=metadata, + method=rpc, + request=request, + response=response, + metadata=metadata, ) # Done; return the response. @@ -713,7 +768,12 @@ def delete_training_pipeline( ) # Send the request. - response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) # Wrap the response in an operation future. response = ga_operation.from_gapic( @@ -803,7 +863,10 @@ def cancel_training_pipeline( # Send the request. rpc( - request, retry=retry, timeout=timeout, metadata=metadata, + request, + retry=retry, + timeout=timeout, + metadata=metadata, ) diff --git a/google/cloud/aiplatform_v1beta1/services/pipeline_service/transports/grpc.py b/google/cloud/aiplatform_v1beta1/services/pipeline_service/transports/grpc.py index 4fc6389449..fc6ca0087e 100644 --- a/google/cloud/aiplatform_v1beta1/services/pipeline_service/transports/grpc.py +++ b/google/cloud/aiplatform_v1beta1/services/pipeline_service/transports/grpc.py @@ -232,8 +232,7 @@ def create_channel( @property def grpc_channel(self) -> grpc.Channel: - """Return the channel designed to connect to this service. - """ + """Return the channel designed to connect to this service.""" return self._grpc_channel @property diff --git a/google/cloud/aiplatform_v1beta1/services/prediction_service/async_client.py b/google/cloud/aiplatform_v1beta1/services/prediction_service/async_client.py index bb58b0bfac..4545ad95e1 100644 --- a/google/cloud/aiplatform_v1beta1/services/prediction_service/async_client.py +++ b/google/cloud/aiplatform_v1beta1/services/prediction_service/async_client.py @@ -241,7 +241,12 @@ async def predict( ) # Send the request. - response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) # Done; return the response. return response @@ -367,7 +372,12 @@ async def explain( ) # Send the request. 
-        response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,)
+        response = await rpc(
+            request,
+            retry=retry,
+            timeout=timeout,
+            metadata=metadata,
+        )

         # Done; return the response.
         return response
diff --git a/google/cloud/aiplatform_v1beta1/services/prediction_service/client.py b/google/cloud/aiplatform_v1beta1/services/prediction_service/client.py
index 9a5976d697..0a01fe3aae 100644
--- a/google/cloud/aiplatform_v1beta1/services/prediction_service/client.py
+++ b/google/cloud/aiplatform_v1beta1/services/prediction_service/client.py
@@ -56,7 +56,8 @@ class PredictionServiceClientMeta(type):
     _transport_registry["grpc_asyncio"] = PredictionServiceGrpcAsyncIOTransport

     def get_transport_class(
-        cls, label: str = None,
+        cls,
+        label: str = None,
     ) -> Type[PredictionServiceTransport]:
         """Return an appropriate transport class.

@@ -143,10 +144,16 @@ def transport(self) -> PredictionServiceTransport:
         return self._transport

     @staticmethod
-    def endpoint_path(project: str, location: str, endpoint: str,) -> str:
+    def endpoint_path(
+        project: str,
+        location: str,
+        endpoint: str,
+    ) -> str:
         """Return a fully-qualified endpoint string."""
         return "projects/{project}/locations/{location}/endpoints/{endpoint}".format(
-            project=project, location=location, endpoint=endpoint,
+            project=project,
+            location=location,
+            endpoint=endpoint,
         )

     @staticmethod
@@ -159,7 +166,9 @@ def parse_endpoint_path(path: str) -> Dict[str, str]:
         return m.groupdict() if m else {}

     @staticmethod
-    def common_billing_account_path(billing_account: str,) -> str:
+    def common_billing_account_path(
+        billing_account: str,
+    ) -> str:
         """Return a fully-qualified billing_account string."""
         return "billingAccounts/{billing_account}".format(
             billing_account=billing_account,
@@ -172,9 +181,13 @@ def parse_common_billing_account_path(path: str) -> Dict[str, str]:
         return m.groupdict() if m else {}

     @staticmethod
-    def common_folder_path(folder: str,) -> str:
+    def common_folder_path(
+        folder: str,
+    ) -> str:
         """Return a fully-qualified folder string."""
-        return "folders/{folder}".format(folder=folder,)
+        return "folders/{folder}".format(
+            folder=folder,
+        )

     @staticmethod
     def parse_common_folder_path(path: str) -> Dict[str, str]:
@@ -183,9 +196,13 @@ def parse_common_folder_path(path: str) -> Dict[str, str]:
         return m.groupdict() if m else {}

     @staticmethod
-    def common_organization_path(organization: str,) -> str:
+    def common_organization_path(
+        organization: str,
+    ) -> str:
         """Return a fully-qualified organization string."""
-        return "organizations/{organization}".format(organization=organization,)
+        return "organizations/{organization}".format(
+            organization=organization,
+        )

     @staticmethod
     def parse_common_organization_path(path: str) -> Dict[str, str]:
@@ -194,9 +211,13 @@ def parse_common_organization_path(path: str) -> Dict[str, str]:
         return m.groupdict() if m else {}

     @staticmethod
-    def common_project_path(project: str,) -> str:
+    def common_project_path(
+        project: str,
+    ) -> str:
         """Return a fully-qualified project string."""
-        return "projects/{project}".format(project=project,)
+        return "projects/{project}".format(
+            project=project,
+        )

     @staticmethod
     def parse_common_project_path(path: str) -> Dict[str, str]:
@@ -205,10 +226,14 @@ def parse_common_project_path(path: str) -> Dict[str, str]:
         return m.groupdict() if m else {}

     @staticmethod
-    def common_location_path(project: str, location: str,) -> str:
+    def common_location_path(
+        project: str,
+        location: str,
+    ) -> str:
         """Return a fully-qualified location string."""
         return "projects/{project}/locations/{location}".format(
-            project=project, location=location,
+            project=project,
+            location=location,
         )

     @staticmethod
@@ -435,7 +460,12 @@ def predict(
         )

         # Send the request.
-        response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,)
+        response = rpc(
+            request,
+            retry=retry,
+            timeout=timeout,
+            metadata=metadata,
+        )

         # Done; return the response.
         return response
@@ -562,7 +592,12 @@ def explain(
         )

         # Send the request.
-        response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,)
+        response = rpc(
+            request,
+            retry=retry,
+            timeout=timeout,
+            metadata=metadata,
+        )

         # Done; return the response.
         return response
diff --git a/google/cloud/aiplatform_v1beta1/services/prediction_service/transports/grpc.py b/google/cloud/aiplatform_v1beta1/services/prediction_service/transports/grpc.py
index 1a102e1a61..fbfbabef1b 100644
--- a/google/cloud/aiplatform_v1beta1/services/prediction_service/transports/grpc.py
+++ b/google/cloud/aiplatform_v1beta1/services/prediction_service/transports/grpc.py
@@ -225,8 +225,7 @@ def create_channel(

     @property
     def grpc_channel(self) -> grpc.Channel:
-        """Return the channel designed to connect to this service.
-        """
+        """Return the channel designed to connect to this service."""
         return self._grpc_channel

     @property
diff --git a/google/cloud/aiplatform_v1beta1/services/specialist_pool_service/async_client.py b/google/cloud/aiplatform_v1beta1/services/specialist_pool_service/async_client.py
index c693126d4c..d27ca66fe1 100644
--- a/google/cloud/aiplatform_v1beta1/services/specialist_pool_service/async_client.py
+++ b/google/cloud/aiplatform_v1beta1/services/specialist_pool_service/async_client.py
@@ -247,7 +247,12 @@ async def create_specialist_pool(
         )

         # Send the request.
-        response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,)
+        response = await rpc(
+            request,
+            retry=retry,
+            timeout=timeout,
+            metadata=metadata,
+        )

         # Wrap the response in an operation future.
         response = operation_async.from_gapic(
@@ -338,7 +343,12 @@ async def get_specialist_pool(
         )

         # Send the request.
-        response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,)
+        response = await rpc(
+            request,
+            retry=retry,
+            timeout=timeout,
+            metadata=metadata,
+        )

         # Done; return the response.
         return response
@@ -414,12 +424,20 @@ async def list_specialist_pools(
         )

         # Send the request.
-        response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,)
+        response = await rpc(
+            request,
+            retry=retry,
+            timeout=timeout,
+            metadata=metadata,
+        )

         # This method is paged; wrap the response in a pager, which provides
         # an `__aiter__` convenience method.
         response = pagers.ListSpecialistPoolsAsyncPager(
-            method=rpc, request=request, response=response, metadata=metadata,
+            method=rpc,
+            request=request,
+            response=response,
+            metadata=metadata,
         )

         # Done; return the response.
@@ -509,7 +527,12 @@ async def delete_specialist_pool(
         )

         # Send the request.
-        response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,)
+        response = await rpc(
+            request,
+            retry=retry,
+            timeout=timeout,
+            metadata=metadata,
+        )

         # Wrap the response in an operation future.
         response = operation_async.from_gapic(
@@ -610,7 +633,12 @@ async def update_specialist_pool(
         )

         # Send the request.
-        response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,)
+        response = await rpc(
+            request,
+            retry=retry,
+            timeout=timeout,
+            metadata=metadata,
+        )

         # Wrap the response in an operation future.
         response = operation_async.from_gapic(
diff --git a/google/cloud/aiplatform_v1beta1/services/specialist_pool_service/client.py b/google/cloud/aiplatform_v1beta1/services/specialist_pool_service/client.py
index efc19eca12..58a55cd7f7 100644
--- a/google/cloud/aiplatform_v1beta1/services/specialist_pool_service/client.py
+++ b/google/cloud/aiplatform_v1beta1/services/specialist_pool_service/client.py
@@ -62,7 +62,8 @@ class SpecialistPoolServiceClientMeta(type):
     _transport_registry["grpc_asyncio"] = SpecialistPoolServiceGrpcAsyncIOTransport

     def get_transport_class(
-        cls, label: str = None,
+        cls,
+        label: str = None,
     ) -> Type[SpecialistPoolServiceTransport]:
         """Return an appropriate transport class.

@@ -155,10 +156,16 @@ def transport(self) -> SpecialistPoolServiceTransport:
         return self._transport

     @staticmethod
-    def specialist_pool_path(project: str, location: str, specialist_pool: str,) -> str:
+    def specialist_pool_path(
+        project: str,
+        location: str,
+        specialist_pool: str,
+    ) -> str:
         """Return a fully-qualified specialist_pool string."""
         return "projects/{project}/locations/{location}/specialistPools/{specialist_pool}".format(
-            project=project, location=location, specialist_pool=specialist_pool,
+            project=project,
+            location=location,
+            specialist_pool=specialist_pool,
         )

     @staticmethod
@@ -171,7 +178,9 @@ def parse_specialist_pool_path(path: str) -> Dict[str, str]:
         return m.groupdict() if m else {}

     @staticmethod
-    def common_billing_account_path(billing_account: str,) -> str:
+    def common_billing_account_path(
+        billing_account: str,
+    ) -> str:
         """Return a fully-qualified billing_account string."""
         return "billingAccounts/{billing_account}".format(
             billing_account=billing_account,
@@ -184,9 +193,13 @@ def parse_common_billing_account_path(path: str) -> Dict[str, str]:
         return m.groupdict() if m else {}

     @staticmethod
-    def common_folder_path(folder: str,) -> str:
+    def common_folder_path(
+        folder: str,
+    ) -> str:
         """Return a fully-qualified folder string."""
-        return "folders/{folder}".format(folder=folder,)
+        return "folders/{folder}".format(
+            folder=folder,
+        )

     @staticmethod
     def parse_common_folder_path(path: str) -> Dict[str, str]:
@@ -195,9 +208,13 @@ def parse_common_folder_path(path: str) -> Dict[str, str]:
         return m.groupdict() if m else {}

     @staticmethod
-    def common_organization_path(organization: str,) -> str:
+    def common_organization_path(
+        organization: str,
+    ) -> str:
         """Return a fully-qualified organization string."""
-        return "organizations/{organization}".format(organization=organization,)
+        return "organizations/{organization}".format(
+            organization=organization,
+        )

     @staticmethod
     def parse_common_organization_path(path: str) -> Dict[str, str]:
@@ -206,9 +223,13 @@ def parse_common_organization_path(path: str) -> Dict[str, str]:
         return m.groupdict() if m else {}

     @staticmethod
-    def common_project_path(project: str,) -> str:
+    def common_project_path(
+        project: str,
+    ) -> str:
         """Return a fully-qualified project string."""
-        return "projects/{project}".format(project=project,)
+        return "projects/{project}".format(
+            project=project,
+        )

     @staticmethod
     def parse_common_project_path(path: str) -> Dict[str, str]:
@@ -217,10 +238,14 @@ def parse_common_project_path(path: str) -> Dict[str, str]:
         return m.groupdict() if m else {}

     @staticmethod
-    def common_location_path(project: str, location: str,) -> str:
+    def common_location_path(
+        project: str,
+        location: str,
+    ) -> str:
         """Return a fully-qualified location string."""
         return "projects/{project}/locations/{location}".format(
-            project=project, location=location,
+            project=project,
+            location=location,
         )

     @staticmethod
@@ -434,7 +459,12 @@ def create_specialist_pool(
         )

         # Send the request.
-        response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,)
+        response = rpc(
+            request,
+            retry=retry,
+            timeout=timeout,
+            metadata=metadata,
+        )

         # Wrap the response in an operation future.
         response = ga_operation.from_gapic(
@@ -526,7 +556,12 @@ def get_specialist_pool(
         )

         # Send the request.
-        response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,)
+        response = rpc(
+            request,
+            retry=retry,
+            timeout=timeout,
+            metadata=metadata,
+        )

         # Done; return the response.
         return response
@@ -603,12 +638,20 @@ def list_specialist_pools(
         )

         # Send the request.
-        response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,)
+        response = rpc(
+            request,
+            retry=retry,
+            timeout=timeout,
+            metadata=metadata,
+        )

         # This method is paged; wrap the response in a pager, which provides
         # an `__iter__` convenience method.
         response = pagers.ListSpecialistPoolsPager(
-            method=rpc, request=request, response=response, metadata=metadata,
+            method=rpc,
+            request=request,
+            response=response,
+            metadata=metadata,
         )

         # Done; return the response.
@@ -699,7 +742,12 @@ def delete_specialist_pool(
         )

         # Send the request.
-        response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,)
+        response = rpc(
+            request,
+            retry=retry,
+            timeout=timeout,
+            metadata=metadata,
+        )

         # Wrap the response in an operation future.
         response = ga_operation.from_gapic(
@@ -801,7 +849,12 @@ def update_specialist_pool(
         )

         # Send the request.
-        response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,)
+        response = rpc(
+            request,
+            retry=retry,
+            timeout=timeout,
+            metadata=metadata,
+        )

         # Wrap the response in an operation future.
         response = ga_operation.from_gapic(
diff --git a/google/cloud/aiplatform_v1beta1/services/specialist_pool_service/transports/grpc.py b/google/cloud/aiplatform_v1beta1/services/specialist_pool_service/transports/grpc.py
index 2d1442ae33..2fc7e0881b 100644
--- a/google/cloud/aiplatform_v1beta1/services/specialist_pool_service/transports/grpc.py
+++ b/google/cloud/aiplatform_v1beta1/services/specialist_pool_service/transports/grpc.py
@@ -233,8 +233,7 @@ def create_channel(

     @property
     def grpc_channel(self) -> grpc.Channel:
-        """Return the channel designed to connect to this service.
- """ + """Return the channel designed to connect to this service.""" return self._grpc_channel @property diff --git a/google/cloud/aiplatform_v1beta1/types/accelerator_type.py b/google/cloud/aiplatform_v1beta1/types/accelerator_type.py index 337b0eeaf5..23be882aaa 100644 --- a/google/cloud/aiplatform_v1beta1/types/accelerator_type.py +++ b/google/cloud/aiplatform_v1beta1/types/accelerator_type.py @@ -19,7 +19,10 @@ __protobuf__ = proto.module( - package="google.cloud.aiplatform.v1beta1", manifest={"AcceleratorType",}, + package="google.cloud.aiplatform.v1beta1", + manifest={ + "AcceleratorType", + }, ) diff --git a/google/cloud/aiplatform_v1beta1/types/annotation.py b/google/cloud/aiplatform_v1beta1/types/annotation.py index 7734fcc512..93bd0481b1 100644 --- a/google/cloud/aiplatform_v1beta1/types/annotation.py +++ b/google/cloud/aiplatform_v1beta1/types/annotation.py @@ -24,7 +24,10 @@ __protobuf__ = proto.module( - package="google.cloud.aiplatform.v1beta1", manifest={"Annotation",}, + package="google.cloud.aiplatform.v1beta1", + manifest={ + "Annotation", + }, ) @@ -91,16 +94,30 @@ class Annotation(proto.Message): payload_schema_uri = proto.Field(proto.STRING, number=2) - payload = proto.Field(proto.MESSAGE, number=3, message=struct.Value,) + payload = proto.Field( + proto.MESSAGE, + number=3, + message=struct.Value, + ) - create_time = proto.Field(proto.MESSAGE, number=4, message=timestamp.Timestamp,) + create_time = proto.Field( + proto.MESSAGE, + number=4, + message=timestamp.Timestamp, + ) - update_time = proto.Field(proto.MESSAGE, number=7, message=timestamp.Timestamp,) + update_time = proto.Field( + proto.MESSAGE, + number=7, + message=timestamp.Timestamp, + ) etag = proto.Field(proto.STRING, number=8) annotation_source = proto.Field( - proto.MESSAGE, number=5, message=user_action_reference.UserActionReference, + proto.MESSAGE, + number=5, + message=user_action_reference.UserActionReference, ) labels = proto.MapField(proto.STRING, proto.STRING, number=6) diff --git a/google/cloud/aiplatform_v1beta1/types/annotation_spec.py b/google/cloud/aiplatform_v1beta1/types/annotation_spec.py index a5a4b3d489..2d6e16e44f 100644 --- a/google/cloud/aiplatform_v1beta1/types/annotation_spec.py +++ b/google/cloud/aiplatform_v1beta1/types/annotation_spec.py @@ -22,7 +22,10 @@ __protobuf__ = proto.module( - package="google.cloud.aiplatform.v1beta1", manifest={"AnnotationSpec",}, + package="google.cloud.aiplatform.v1beta1", + manifest={ + "AnnotationSpec", + }, ) @@ -55,9 +58,17 @@ class AnnotationSpec(proto.Message): display_name = proto.Field(proto.STRING, number=2) - create_time = proto.Field(proto.MESSAGE, number=3, message=timestamp.Timestamp,) - - update_time = proto.Field(proto.MESSAGE, number=4, message=timestamp.Timestamp,) + create_time = proto.Field( + proto.MESSAGE, + number=3, + message=timestamp.Timestamp, + ) + + update_time = proto.Field( + proto.MESSAGE, + number=4, + message=timestamp.Timestamp, + ) etag = proto.Field(proto.STRING, number=5) diff --git a/google/cloud/aiplatform_v1beta1/types/batch_prediction_job.py b/google/cloud/aiplatform_v1beta1/types/batch_prediction_job.py index 625bf83155..3d7501f3ce 100644 --- a/google/cloud/aiplatform_v1beta1/types/batch_prediction_job.py +++ b/google/cloud/aiplatform_v1beta1/types/batch_prediction_job.py @@ -34,7 +34,10 @@ __protobuf__ = proto.module( - package="google.cloud.aiplatform.v1beta1", manifest={"BatchPredictionJob",}, + package="google.cloud.aiplatform.v1beta1", + manifest={ + "BatchPredictionJob", + }, ) @@ -211,11 +214,17 @@ 
class InputConfig(proto.Message): """ gcs_source = proto.Field( - proto.MESSAGE, number=2, oneof="source", message=io.GcsSource, + proto.MESSAGE, + number=2, + oneof="source", + message=io.GcsSource, ) bigquery_source = proto.Field( - proto.MESSAGE, number=3, oneof="source", message=io.BigQuerySource, + proto.MESSAGE, + number=3, + oneof="source", + message=io.BigQuerySource, ) instances_format = proto.Field(proto.STRING, number=1) @@ -287,7 +296,10 @@ class OutputConfig(proto.Message): """ gcs_destination = proto.Field( - proto.MESSAGE, number=2, oneof="destination", message=io.GcsDestination, + proto.MESSAGE, + number=2, + oneof="destination", + message=io.GcsDestination, ) bigquery_destination = proto.Field( @@ -328,14 +340,28 @@ class OutputInfo(proto.Message): model = proto.Field(proto.STRING, number=3) - input_config = proto.Field(proto.MESSAGE, number=4, message=InputConfig,) + input_config = proto.Field( + proto.MESSAGE, + number=4, + message=InputConfig, + ) - model_parameters = proto.Field(proto.MESSAGE, number=5, message=struct.Value,) + model_parameters = proto.Field( + proto.MESSAGE, + number=5, + message=struct.Value, + ) - output_config = proto.Field(proto.MESSAGE, number=6, message=OutputConfig,) + output_config = proto.Field( + proto.MESSAGE, + number=6, + message=OutputConfig, + ) dedicated_resources = proto.Field( - proto.MESSAGE, number=7, message=machine_resources.BatchDedicatedResources, + proto.MESSAGE, + number=7, + message=machine_resources.BatchDedicatedResources, ) manual_batch_tuning_parameters = proto.Field( @@ -347,34 +373,70 @@ class OutputInfo(proto.Message): generate_explanation = proto.Field(proto.BOOL, number=23) explanation_spec = proto.Field( - proto.MESSAGE, number=25, message=explanation.ExplanationSpec, + proto.MESSAGE, + number=25, + message=explanation.ExplanationSpec, ) - output_info = proto.Field(proto.MESSAGE, number=9, message=OutputInfo,) + output_info = proto.Field( + proto.MESSAGE, + number=9, + message=OutputInfo, + ) - state = proto.Field(proto.ENUM, number=10, enum=job_state.JobState,) + state = proto.Field( + proto.ENUM, + number=10, + enum=job_state.JobState, + ) - error = proto.Field(proto.MESSAGE, number=11, message=status.Status,) + error = proto.Field( + proto.MESSAGE, + number=11, + message=status.Status, + ) partial_failures = proto.RepeatedField( - proto.MESSAGE, number=12, message=status.Status, + proto.MESSAGE, + number=12, + message=status.Status, ) resources_consumed = proto.Field( - proto.MESSAGE, number=13, message=machine_resources.ResourcesConsumed, + proto.MESSAGE, + number=13, + message=machine_resources.ResourcesConsumed, ) completion_stats = proto.Field( - proto.MESSAGE, number=14, message=gca_completion_stats.CompletionStats, + proto.MESSAGE, + number=14, + message=gca_completion_stats.CompletionStats, ) - create_time = proto.Field(proto.MESSAGE, number=15, message=timestamp.Timestamp,) + create_time = proto.Field( + proto.MESSAGE, + number=15, + message=timestamp.Timestamp, + ) - start_time = proto.Field(proto.MESSAGE, number=16, message=timestamp.Timestamp,) + start_time = proto.Field( + proto.MESSAGE, + number=16, + message=timestamp.Timestamp, + ) - end_time = proto.Field(proto.MESSAGE, number=17, message=timestamp.Timestamp,) + end_time = proto.Field( + proto.MESSAGE, + number=17, + message=timestamp.Timestamp, + ) - update_time = proto.Field(proto.MESSAGE, number=18, message=timestamp.Timestamp,) + update_time = proto.Field( + proto.MESSAGE, + number=18, + message=timestamp.Timestamp, + ) labels = 
proto.MapField(proto.STRING, proto.STRING, number=19) diff --git a/google/cloud/aiplatform_v1beta1/types/completion_stats.py b/google/cloud/aiplatform_v1beta1/types/completion_stats.py index 165be59634..f2626b9c9b 100644 --- a/google/cloud/aiplatform_v1beta1/types/completion_stats.py +++ b/google/cloud/aiplatform_v1beta1/types/completion_stats.py @@ -19,7 +19,10 @@ __protobuf__ = proto.module( - package="google.cloud.aiplatform.v1beta1", manifest={"CompletionStats",}, + package="google.cloud.aiplatform.v1beta1", + manifest={ + "CompletionStats", + }, ) diff --git a/google/cloud/aiplatform_v1beta1/types/custom_job.py b/google/cloud/aiplatform_v1beta1/types/custom_job.py index 2d8745538c..d4e2a086bd 100644 --- a/google/cloud/aiplatform_v1beta1/types/custom_job.py +++ b/google/cloud/aiplatform_v1beta1/types/custom_job.py @@ -89,19 +89,47 @@ class CustomJob(proto.Message): display_name = proto.Field(proto.STRING, number=2) - job_spec = proto.Field(proto.MESSAGE, number=4, message="CustomJobSpec",) + job_spec = proto.Field( + proto.MESSAGE, + number=4, + message="CustomJobSpec", + ) - state = proto.Field(proto.ENUM, number=5, enum=job_state.JobState,) + state = proto.Field( + proto.ENUM, + number=5, + enum=job_state.JobState, + ) - create_time = proto.Field(proto.MESSAGE, number=6, message=timestamp.Timestamp,) + create_time = proto.Field( + proto.MESSAGE, + number=6, + message=timestamp.Timestamp, + ) - start_time = proto.Field(proto.MESSAGE, number=7, message=timestamp.Timestamp,) + start_time = proto.Field( + proto.MESSAGE, + number=7, + message=timestamp.Timestamp, + ) - end_time = proto.Field(proto.MESSAGE, number=8, message=timestamp.Timestamp,) + end_time = proto.Field( + proto.MESSAGE, + number=8, + message=timestamp.Timestamp, + ) - update_time = proto.Field(proto.MESSAGE, number=9, message=timestamp.Timestamp,) + update_time = proto.Field( + proto.MESSAGE, + number=9, + message=timestamp.Timestamp, + ) - error = proto.Field(proto.MESSAGE, number=10, message=status.Status,) + error = proto.Field( + proto.MESSAGE, + number=10, + message=status.Status, + ) labels = proto.MapField(proto.STRING, proto.STRING, number=11) @@ -167,17 +195,25 @@ class CustomJobSpec(proto.Message): """ worker_pool_specs = proto.RepeatedField( - proto.MESSAGE, number=1, message="WorkerPoolSpec", + proto.MESSAGE, + number=1, + message="WorkerPoolSpec", ) - scheduling = proto.Field(proto.MESSAGE, number=3, message="Scheduling",) + scheduling = proto.Field( + proto.MESSAGE, + number=3, + message="Scheduling", + ) service_account = proto.Field(proto.STRING, number=4) network = proto.Field(proto.STRING, number=5) base_output_directory = proto.Field( - proto.MESSAGE, number=6, message=io.GcsDestination, + proto.MESSAGE, + number=6, + message=io.GcsDestination, ) @@ -200,21 +236,31 @@ class WorkerPoolSpec(proto.Message): """ container_spec = proto.Field( - proto.MESSAGE, number=6, oneof="task", message="ContainerSpec", + proto.MESSAGE, + number=6, + oneof="task", + message="ContainerSpec", ) python_package_spec = proto.Field( - proto.MESSAGE, number=7, oneof="task", message="PythonPackageSpec", + proto.MESSAGE, + number=7, + oneof="task", + message="PythonPackageSpec", ) machine_spec = proto.Field( - proto.MESSAGE, number=1, message=machine_resources.MachineSpec, + proto.MESSAGE, + number=1, + message=machine_resources.MachineSpec, ) replica_count = proto.Field(proto.INT64, number=2) disk_spec = proto.Field( - proto.MESSAGE, number=5, message=machine_resources.DiskSpec, + proto.MESSAGE, + number=5, + 
message=machine_resources.DiskSpec, ) @@ -290,7 +336,11 @@ class Scheduling(proto.Message): to workers leaving and joining a job. """ - timeout = proto.Field(proto.MESSAGE, number=1, message=duration.Duration,) + timeout = proto.Field( + proto.MESSAGE, + number=1, + message=duration.Duration, + ) restart_job_on_worker_restart = proto.Field(proto.BOOL, number=3) diff --git a/google/cloud/aiplatform_v1beta1/types/data_item.py b/google/cloud/aiplatform_v1beta1/types/data_item.py index e43a944d94..8ef4b9c8c6 100644 --- a/google/cloud/aiplatform_v1beta1/types/data_item.py +++ b/google/cloud/aiplatform_v1beta1/types/data_item.py @@ -23,7 +23,10 @@ __protobuf__ = proto.module( - package="google.cloud.aiplatform.v1beta1", manifest={"DataItem",}, + package="google.cloud.aiplatform.v1beta1", + manifest={ + "DataItem", + }, ) @@ -70,13 +73,25 @@ class DataItem(proto.Message): name = proto.Field(proto.STRING, number=1) - create_time = proto.Field(proto.MESSAGE, number=2, message=timestamp.Timestamp,) + create_time = proto.Field( + proto.MESSAGE, + number=2, + message=timestamp.Timestamp, + ) - update_time = proto.Field(proto.MESSAGE, number=6, message=timestamp.Timestamp,) + update_time = proto.Field( + proto.MESSAGE, + number=6, + message=timestamp.Timestamp, + ) labels = proto.MapField(proto.STRING, proto.STRING, number=3) - payload = proto.Field(proto.MESSAGE, number=4, message=struct.Value,) + payload = proto.Field( + proto.MESSAGE, + number=4, + message=struct.Value, + ) etag = proto.Field(proto.STRING, number=7) diff --git a/google/cloud/aiplatform_v1beta1/types/data_labeling_job.py b/google/cloud/aiplatform_v1beta1/types/data_labeling_job.py index af1bcdd871..c1542d0661 100644 --- a/google/cloud/aiplatform_v1beta1/types/data_labeling_job.py +++ b/google/cloud/aiplatform_v1beta1/types/data_labeling_job.py @@ -146,26 +146,52 @@ class DataLabelingJob(proto.Message): inputs_schema_uri = proto.Field(proto.STRING, number=6) - inputs = proto.Field(proto.MESSAGE, number=7, message=struct.Value,) + inputs = proto.Field( + proto.MESSAGE, + number=7, + message=struct.Value, + ) - state = proto.Field(proto.ENUM, number=8, enum=job_state.JobState,) + state = proto.Field( + proto.ENUM, + number=8, + enum=job_state.JobState, + ) labeling_progress = proto.Field(proto.INT32, number=13) - current_spend = proto.Field(proto.MESSAGE, number=14, message=money.Money,) + current_spend = proto.Field( + proto.MESSAGE, + number=14, + message=money.Money, + ) - create_time = proto.Field(proto.MESSAGE, number=9, message=timestamp.Timestamp,) + create_time = proto.Field( + proto.MESSAGE, + number=9, + message=timestamp.Timestamp, + ) - update_time = proto.Field(proto.MESSAGE, number=10, message=timestamp.Timestamp,) + update_time = proto.Field( + proto.MESSAGE, + number=10, + message=timestamp.Timestamp, + ) - error = proto.Field(proto.MESSAGE, number=22, message=status.Status,) + error = proto.Field( + proto.MESSAGE, + number=22, + message=status.Status, + ) labels = proto.MapField(proto.STRING, proto.STRING, number=11) specialist_pools = proto.RepeatedField(proto.STRING, number=16) active_learning_config = proto.Field( - proto.MESSAGE, number=21, message="ActiveLearningConfig", + proto.MESSAGE, + number=21, + message="ActiveLearningConfig", ) @@ -202,9 +228,17 @@ class ActiveLearningConfig(proto.Message): proto.INT32, number=2, oneof="human_labeling_budget" ) - sample_config = proto.Field(proto.MESSAGE, number=3, message="SampleConfig",) + sample_config = proto.Field( + proto.MESSAGE, + number=3, + message="SampleConfig", 
+    )

-    training_config = proto.Field(proto.MESSAGE, number=4, message="TrainingConfig",)
+    training_config = proto.Field(
+        proto.MESSAGE,
+        number=4,
+        message="TrainingConfig",
+    )


 class SampleConfig(proto.Message):
@@ -241,7 +275,11 @@ class SampleStrategy(proto.Enum):
         proto.INT32, number=3, oneof="following_batch_sample_size"
     )

-    sample_strategy = proto.Field(proto.ENUM, number=5, enum=SampleStrategy,)
+    sample_strategy = proto.Field(
+        proto.ENUM,
+        number=5,
+        enum=SampleStrategy,
+    )


 class TrainingConfig(proto.Message):
diff --git a/google/cloud/aiplatform_v1beta1/types/dataset.py b/google/cloud/aiplatform_v1beta1/types/dataset.py
index 76f6462f40..8b93a04e1b 100644
--- a/google/cloud/aiplatform_v1beta1/types/dataset.py
+++ b/google/cloud/aiplatform_v1beta1/types/dataset.py
@@ -25,7 +25,11 @@

 __protobuf__ = proto.module(
     package="google.cloud.aiplatform.v1beta1",
-    manifest={"Dataset", "ImportDataConfig", "ExportDataConfig",},
+    manifest={
+        "Dataset",
+        "ImportDataConfig",
+        "ExportDataConfig",
+    },
 )


@@ -88,11 +92,23 @@ class Dataset(proto.Message):

     metadata_schema_uri = proto.Field(proto.STRING, number=3)

-    metadata = proto.Field(proto.MESSAGE, number=8, message=struct.Value,)
+    metadata = proto.Field(
+        proto.MESSAGE,
+        number=8,
+        message=struct.Value,
+    )

-    create_time = proto.Field(proto.MESSAGE, number=4, message=timestamp.Timestamp,)
+    create_time = proto.Field(
+        proto.MESSAGE,
+        number=4,
+        message=timestamp.Timestamp,
+    )

-    update_time = proto.Field(proto.MESSAGE, number=5, message=timestamp.Timestamp,)
+    update_time = proto.Field(
+        proto.MESSAGE,
+        number=5,
+        message=timestamp.Timestamp,
+    )

     etag = proto.Field(proto.STRING, number=6)
@@ -132,7 +148,10 @@ class ImportDataConfig(proto.Message):
     """

     gcs_source = proto.Field(
-        proto.MESSAGE, number=1, oneof="source", message=io.GcsSource,
+        proto.MESSAGE,
+        number=1,
+        oneof="source",
+        message=io.GcsSource,
     )

     data_item_labels = proto.MapField(proto.STRING, proto.STRING, number=2)
@@ -166,7 +185,10 @@ class ExportDataConfig(proto.Message):
     """

     gcs_destination = proto.Field(
-        proto.MESSAGE, number=1, oneof="destination", message=io.GcsDestination,
+        proto.MESSAGE,
+        number=1,
+        oneof="destination",
+        message=io.GcsDestination,
     )

     annotations_filter = proto.Field(proto.STRING, number=2)
diff --git a/google/cloud/aiplatform_v1beta1/types/dataset_service.py b/google/cloud/aiplatform_v1beta1/types/dataset_service.py
index 7160b7b52f..aebd5ebb31 100644
--- a/google/cloud/aiplatform_v1beta1/types/dataset_service.py
+++ b/google/cloud/aiplatform_v1beta1/types/dataset_service.py
@@ -65,7 +65,11 @@ class CreateDatasetRequest(proto.Message):

     parent = proto.Field(proto.STRING, number=1)

-    dataset = proto.Field(proto.MESSAGE, number=2, message=gca_dataset.Dataset,)
+    dataset = proto.Field(
+        proto.MESSAGE,
+        number=2,
+        message=gca_dataset.Dataset,
+    )


 class CreateDatasetOperationMetadata(proto.Message):
@@ -78,7 +82,9 @@ class CreateDatasetOperationMetadata(proto.Message):
     """

     generic_metadata = proto.Field(
-        proto.MESSAGE, number=1, message=operation.GenericOperationMetadata,
+        proto.MESSAGE,
+        number=1,
+        message=operation.GenericOperationMetadata,
     )


@@ -95,7 +101,11 @@ class GetDatasetRequest(proto.Message):

     name = proto.Field(proto.STRING, number=1)

-    read_mask = proto.Field(proto.MESSAGE, number=2, message=field_mask.FieldMask,)
+    read_mask = proto.Field(
+        proto.MESSAGE,
+        number=2,
+        message=field_mask.FieldMask,
+    )


 class UpdateDatasetRequest(proto.Message):
@@ -118,9 +128,17 @@ class UpdateDatasetRequest(proto.Message):

         -  ``labels``
     """

-    dataset = proto.Field(proto.MESSAGE, number=1, message=gca_dataset.Dataset,)
+    dataset = proto.Field(
+        proto.MESSAGE,
+        number=1,
+        message=gca_dataset.Dataset,
+    )

-    update_mask = proto.Field(proto.MESSAGE, number=2, message=field_mask.FieldMask,)
+    update_mask = proto.Field(
+        proto.MESSAGE,
+        number=2,
+        message=field_mask.FieldMask,
+    )


 class ListDatasetsRequest(proto.Message):
@@ -157,7 +175,11 @@ class ListDatasetsRequest(proto.Message):

     page_token = proto.Field(proto.STRING, number=4)

-    read_mask = proto.Field(proto.MESSAGE, number=5, message=field_mask.FieldMask,)
+    read_mask = proto.Field(
+        proto.MESSAGE,
+        number=5,
+        message=field_mask.FieldMask,
+    )

     order_by = proto.Field(proto.STRING, number=6)
@@ -179,7 +201,9 @@ def raw_page(self):
         return self

     datasets = proto.RepeatedField(
-        proto.MESSAGE, number=1, message=gca_dataset.Dataset,
+        proto.MESSAGE,
+        number=1,
+        message=gca_dataset.Dataset,
     )

     next_page_token = proto.Field(proto.STRING, number=2)
@@ -216,7 +240,9 @@ class ImportDataRequest(proto.Message):

     name = proto.Field(proto.STRING, number=1)

     import_configs = proto.RepeatedField(
-        proto.MESSAGE, number=2, message=gca_dataset.ImportDataConfig,
+        proto.MESSAGE,
+        number=2,
+        message=gca_dataset.ImportDataConfig,
     )


@@ -236,7 +262,9 @@ class ImportDataOperationMetadata(proto.Message):
     """

     generic_metadata = proto.Field(
-        proto.MESSAGE, number=1, message=operation.GenericOperationMetadata,
+        proto.MESSAGE,
+        number=1,
+        message=operation.GenericOperationMetadata,
     )


@@ -255,7 +283,9 @@ class ExportDataRequest(proto.Message):

     name = proto.Field(proto.STRING, number=1)

     export_config = proto.Field(
-        proto.MESSAGE, number=2, message=gca_dataset.ExportDataConfig,
+        proto.MESSAGE,
+        number=2,
+        message=gca_dataset.ExportDataConfig,
     )


@@ -286,7 +316,9 @@ class ExportDataOperationMetadata(proto.Message):
     """

     generic_metadata = proto.Field(
-        proto.MESSAGE, number=1, message=operation.GenericOperationMetadata,
+        proto.MESSAGE,
+        number=1,
+        message=operation.GenericOperationMetadata,
     )

     gcs_output_directory = proto.Field(proto.STRING, number=2)
@@ -323,7 +355,11 @@ class ListDataItemsRequest(proto.Message):

     page_token = proto.Field(proto.STRING, number=4)

-    read_mask = proto.Field(proto.MESSAGE, number=5, message=field_mask.FieldMask,)
+    read_mask = proto.Field(
+        proto.MESSAGE,
+        number=5,
+        message=field_mask.FieldMask,
+    )

     order_by = proto.Field(proto.STRING, number=6)
@@ -345,7 +381,9 @@ def raw_page(self):
         return self

     data_items = proto.RepeatedField(
-        proto.MESSAGE, number=1, message=data_item.DataItem,
+        proto.MESSAGE,
+        number=1,
+        message=data_item.DataItem,
     )

     next_page_token = proto.Field(proto.STRING, number=2)
@@ -366,7 +404,11 @@ class GetAnnotationSpecRequest(proto.Message):

     name = proto.Field(proto.STRING, number=1)

-    read_mask = proto.Field(proto.MESSAGE, number=2, message=field_mask.FieldMask,)
+    read_mask = proto.Field(
+        proto.MESSAGE,
+        number=2,
+        message=field_mask.FieldMask,
+    )


 class ListAnnotationsRequest(proto.Message):
@@ -401,7 +443,11 @@ class ListAnnotationsRequest(proto.Message):

     page_token = proto.Field(proto.STRING, number=4)

-    read_mask = proto.Field(proto.MESSAGE, number=5, message=field_mask.FieldMask,)
+    read_mask = proto.Field(
+        proto.MESSAGE,
+        number=5,
+        message=field_mask.FieldMask,
+    )

     order_by = proto.Field(proto.STRING, number=6)
@@ -423,7 +469,9 @@ def raw_page(self):
         return self

     annotations = proto.RepeatedField(
-        proto.MESSAGE, number=1, message=annotation.Annotation,
+        proto.MESSAGE,
+        number=1,
+        message=annotation.Annotation,
     )

     next_page_token = proto.Field(proto.STRING, number=2)
diff --git a/google/cloud/aiplatform_v1beta1/types/deployed_model_ref.py b/google/cloud/aiplatform_v1beta1/types/deployed_model_ref.py
index b0ec7010a2..f94dc7793a 100644
--- a/google/cloud/aiplatform_v1beta1/types/deployed_model_ref.py
+++ b/google/cloud/aiplatform_v1beta1/types/deployed_model_ref.py
@@ -19,7 +19,10 @@


 __protobuf__ = proto.module(
-    package="google.cloud.aiplatform.v1beta1", manifest={"DeployedModelRef",},
+    package="google.cloud.aiplatform.v1beta1",
+    manifest={
+        "DeployedModelRef",
+    },
 )


diff --git a/google/cloud/aiplatform_v1beta1/types/endpoint.py b/google/cloud/aiplatform_v1beta1/types/endpoint.py
index f1ba6ed85d..326ca3c35c 100644
--- a/google/cloud/aiplatform_v1beta1/types/endpoint.py
+++ b/google/cloud/aiplatform_v1beta1/types/endpoint.py
@@ -24,7 +24,11 @@


 __protobuf__ = proto.module(
-    package="google.cloud.aiplatform.v1beta1", manifest={"Endpoint", "DeployedModel",},
+    package="google.cloud.aiplatform.v1beta1",
+    manifest={
+        "Endpoint",
+        "DeployedModel",
+    },
 )


@@ -88,7 +92,9 @@ class Endpoint(proto.Message):

     description = proto.Field(proto.STRING, number=3)

     deployed_models = proto.RepeatedField(
-        proto.MESSAGE, number=4, message="DeployedModel",
+        proto.MESSAGE,
+        number=4,
+        message="DeployedModel",
     )

     traffic_split = proto.MapField(proto.STRING, proto.INT32, number=5)
@@ -97,9 +103,17 @@ class Endpoint(proto.Message):

     labels = proto.MapField(proto.STRING, proto.STRING, number=7)

-    create_time = proto.Field(proto.MESSAGE, number=8, message=timestamp.Timestamp,)
+    create_time = proto.Field(
+        proto.MESSAGE,
+        number=8,
+        message=timestamp.Timestamp,
+    )

-    update_time = proto.Field(proto.MESSAGE, number=9, message=timestamp.Timestamp,)
+    update_time = proto.Field(
+        proto.MESSAGE,
+        number=9,
+        message=timestamp.Timestamp,
+    )


 class DeployedModel(proto.Message):
@@ -193,10 +207,16 @@ class DeployedModel(proto.Message):

     display_name = proto.Field(proto.STRING, number=3)

-    create_time = proto.Field(proto.MESSAGE, number=6, message=timestamp.Timestamp,)
+    create_time = proto.Field(
+        proto.MESSAGE,
+        number=6,
+        message=timestamp.Timestamp,
+    )

     explanation_spec = proto.Field(
-        proto.MESSAGE, number=9, message=explanation.ExplanationSpec,
+        proto.MESSAGE,
+        number=9,
+        message=explanation.ExplanationSpec,
     )

     service_account = proto.Field(proto.STRING, number=11)
diff --git a/google/cloud/aiplatform_v1beta1/types/endpoint_service.py b/google/cloud/aiplatform_v1beta1/types/endpoint_service.py
index 4bc9f35594..659268cd22 100644
--- a/google/cloud/aiplatform_v1beta1/types/endpoint_service.py
+++ b/google/cloud/aiplatform_v1beta1/types/endpoint_service.py
@@ -58,7 +58,11 @@ class CreateEndpointRequest(proto.Message):

     parent = proto.Field(proto.STRING, number=1)

-    endpoint = proto.Field(proto.MESSAGE, number=2, message=gca_endpoint.Endpoint,)
+    endpoint = proto.Field(
+        proto.MESSAGE,
+        number=2,
+        message=gca_endpoint.Endpoint,
+    )


 class CreateEndpointOperationMetadata(proto.Message):
@@ -71,7 +75,9 @@ class CreateEndpointOperationMetadata(proto.Message):
     """

     generic_metadata = proto.Field(
-        proto.MESSAGE, number=1, message=operation.GenericOperationMetadata,
+        proto.MESSAGE,
+        number=1,
+        message=operation.GenericOperationMetadata,
     )


@@ -143,7 +149,11 @@ class ListEndpointsRequest(proto.Message):

     page_token = proto.Field(proto.STRING, number=4)

-    read_mask = proto.Field(proto.MESSAGE, number=5, message=field_mask.FieldMask,)
+    read_mask = proto.Field(
+        proto.MESSAGE,
+        number=5,
+        message=field_mask.FieldMask,
+    )


 class ListEndpointsResponse(proto.Message):
@@ -164,7 +174,9 @@ def raw_page(self):
         return self

     endpoints = proto.RepeatedField(
-        proto.MESSAGE, number=1, message=gca_endpoint.Endpoint,
+        proto.MESSAGE,
+        number=1,
+        message=gca_endpoint.Endpoint,
     )

     next_page_token = proto.Field(proto.STRING, number=2)
@@ -183,9 +195,17 @@ class UpdateEndpointRequest(proto.Message):
         resource.
     """

-    endpoint = proto.Field(proto.MESSAGE, number=1, message=gca_endpoint.Endpoint,)
+    endpoint = proto.Field(
+        proto.MESSAGE,
+        number=1,
+        message=gca_endpoint.Endpoint,
+    )

-    update_mask = proto.Field(proto.MESSAGE, number=2, message=field_mask.FieldMask,)
+    update_mask = proto.Field(
+        proto.MESSAGE,
+        number=2,
+        message=field_mask.FieldMask,
+    )


 class DeleteEndpointRequest(proto.Message):
@@ -239,7 +259,9 @@ class DeployModelRequest(proto.Message):

     endpoint = proto.Field(proto.STRING, number=1)

     deployed_model = proto.Field(
-        proto.MESSAGE, number=2, message=gca_endpoint.DeployedModel,
+        proto.MESSAGE,
+        number=2,
+        message=gca_endpoint.DeployedModel,
     )

     traffic_split = proto.MapField(proto.STRING, proto.INT32, number=3)
@@ -256,7 +278,9 @@ class DeployModelResponse(proto.Message):
     """

     deployed_model = proto.Field(
-        proto.MESSAGE, number=1, message=gca_endpoint.DeployedModel,
+        proto.MESSAGE,
+        number=1,
+        message=gca_endpoint.DeployedModel,
     )


@@ -270,7 +294,9 @@ class DeployModelOperationMetadata(proto.Message):
     """

     generic_metadata = proto.Field(
-        proto.MESSAGE, number=1, message=operation.GenericOperationMetadata,
+        proto.MESSAGE,
+        number=1,
+        message=operation.GenericOperationMetadata,
     )


@@ -320,7 +346,9 @@ class UndeployModelOperationMetadata(proto.Message):
     """

     generic_metadata = proto.Field(
-        proto.MESSAGE, number=1, message=operation.GenericOperationMetadata,
+        proto.MESSAGE,
+        number=1,
+        message=operation.GenericOperationMetadata,
     )

diff --git a/google/cloud/aiplatform_v1beta1/types/env_var.py b/google/cloud/aiplatform_v1beta1/types/env_var.py
index 207e8275cd..74b460116d 100644
--- a/google/cloud/aiplatform_v1beta1/types/env_var.py
+++ b/google/cloud/aiplatform_v1beta1/types/env_var.py
@@ -19,7 +19,10 @@


 __protobuf__ = proto.module(
-    package="google.cloud.aiplatform.v1beta1", manifest={"EnvVar",},
+    package="google.cloud.aiplatform.v1beta1",
+    manifest={
+        "EnvVar",
+    },
 )


diff --git a/google/cloud/aiplatform_v1beta1/types/explanation.py b/google/cloud/aiplatform_v1beta1/types/explanation.py
index 7a495fff1e..4b7c14c490 100644
--- a/google/cloud/aiplatform_v1beta1/types/explanation.py
+++ b/google/cloud/aiplatform_v1beta1/types/explanation.py
@@ -71,7 +71,11 @@ class Explanation(proto.Message):
            in the same order as they appear in the output_indices.
""" - attributions = proto.RepeatedField(proto.MESSAGE, number=1, message="Attribution",) + attributions = proto.RepeatedField( + proto.MESSAGE, + number=1, + message="Attribution", + ) class ModelExplanation(proto.Message): @@ -109,7 +113,9 @@ class ModelExplanation(proto.Message): """ mean_attributions = proto.RepeatedField( - proto.MESSAGE, number=1, message="Attribution", + proto.MESSAGE, + number=1, + message="Attribution", ) @@ -233,7 +239,11 @@ class Attribution(proto.Message): instance_output_value = proto.Field(proto.DOUBLE, number=2) - feature_attributions = proto.Field(proto.MESSAGE, number=3, message=struct.Value,) + feature_attributions = proto.Field( + proto.MESSAGE, + number=3, + message=struct.Value, + ) output_index = proto.RepeatedField(proto.INT32, number=4) @@ -256,10 +266,16 @@ class ExplanationSpec(proto.Message): input and output for explanation. """ - parameters = proto.Field(proto.MESSAGE, number=1, message="ExplanationParameters",) + parameters = proto.Field( + proto.MESSAGE, + number=1, + message="ExplanationParameters", + ) metadata = proto.Field( - proto.MESSAGE, number=2, message=explanation_metadata.ExplanationMetadata, + proto.MESSAGE, + number=2, + message=explanation_metadata.ExplanationMetadata, ) @@ -317,7 +333,10 @@ class ExplanationParameters(proto.Message): """ sampled_shapley_attribution = proto.Field( - proto.MESSAGE, number=1, oneof="method", message="SampledShapleyAttribution", + proto.MESSAGE, + number=1, + oneof="method", + message="SampledShapleyAttribution", ) integrated_gradients_attribution = proto.Field( @@ -328,12 +347,19 @@ class ExplanationParameters(proto.Message): ) xrai_attribution = proto.Field( - proto.MESSAGE, number=3, oneof="method", message="XraiAttribution", + proto.MESSAGE, + number=3, + oneof="method", + message="XraiAttribution", ) top_k = proto.Field(proto.INT32, number=4) - output_indices = proto.Field(proto.MESSAGE, number=5, message=struct.ListValue,) + output_indices = proto.Field( + proto.MESSAGE, + number=5, + message=struct.ListValue, + ) class SampledShapleyAttribution(proto.Message): @@ -381,7 +407,9 @@ class IntegratedGradientsAttribution(proto.Message): step_count = proto.Field(proto.INT32, number=1) smooth_grad_config = proto.Field( - proto.MESSAGE, number=2, message="SmoothGradConfig", + proto.MESSAGE, + number=2, + message="SmoothGradConfig", ) @@ -416,7 +444,9 @@ class XraiAttribution(proto.Message): step_count = proto.Field(proto.INT32, number=1) smooth_grad_config = proto.Field( - proto.MESSAGE, number=2, message="SmoothGradConfig", + proto.MESSAGE, + number=2, + message="SmoothGradConfig", ) @@ -508,7 +538,9 @@ class NoiseSigmaForFeature(proto.Message): sigma = proto.Field(proto.FLOAT, number=2) noise_sigma = proto.RepeatedField( - proto.MESSAGE, number=1, message=NoiseSigmaForFeature, + proto.MESSAGE, + number=1, + message=NoiseSigmaForFeature, ) diff --git a/google/cloud/aiplatform_v1beta1/types/explanation_metadata.py b/google/cloud/aiplatform_v1beta1/types/explanation_metadata.py index 7261c064f8..78c46d1dd0 100644 --- a/google/cloud/aiplatform_v1beta1/types/explanation_metadata.py +++ b/google/cloud/aiplatform_v1beta1/types/explanation_metadata.py @@ -22,7 +22,10 @@ __protobuf__ = proto.module( - package="google.cloud.aiplatform.v1beta1", manifest={"ExplanationMetadata",}, + package="google.cloud.aiplatform.v1beta1", + manifest={ + "ExplanationMetadata", + }, ) @@ -316,13 +319,17 @@ class OverlayType(proto.Enum): ) input_baselines = proto.RepeatedField( - proto.MESSAGE, number=1, message=struct.Value, 
+            proto.MESSAGE,
+            number=1,
+            message=struct.Value,
         )

         input_tensor_name = proto.Field(proto.STRING, number=2)

         encoding = proto.Field(
-            proto.ENUM, number=3, enum="ExplanationMetadata.InputMetadata.Encoding",
+            proto.ENUM,
+            number=3,
+            enum="ExplanationMetadata.InputMetadata.Encoding",
         )

         modality = proto.Field(proto.STRING, number=4)
@@ -342,7 +349,9 @@

         encoded_tensor_name = proto.Field(proto.STRING, number=9)

         encoded_baselines = proto.RepeatedField(
-            proto.MESSAGE, number=10, message=struct.Value,
+            proto.MESSAGE,
+            number=10,
+            message=struct.Value,
         )

         visualization = proto.Field(
@@ -391,7 +400,10 @@ class OutputMetadata(proto.Message):
         """

         index_display_name_mapping = proto.Field(
-            proto.MESSAGE, number=1, oneof="display_name_mapping", message=struct.Value,
+            proto.MESSAGE,
+            number=1,
+            oneof="display_name_mapping",
+            message=struct.Value,
         )

         display_name_mapping_key = proto.Field(
@@ -401,11 +413,17 @@

         output_tensor_name = proto.Field(proto.STRING, number=3)

     inputs = proto.MapField(
-        proto.STRING, proto.MESSAGE, number=1, message=InputMetadata,
+        proto.STRING,
+        proto.MESSAGE,
+        number=1,
+        message=InputMetadata,
     )

     outputs = proto.MapField(
-        proto.STRING, proto.MESSAGE, number=2, message=OutputMetadata,
+        proto.STRING,
+        proto.MESSAGE,
+        number=2,
+        message=OutputMetadata,
     )

     feature_attributions_schema_uri = proto.Field(proto.STRING, number=3)
diff --git a/google/cloud/aiplatform_v1beta1/types/hyperparameter_tuning_job.py b/google/cloud/aiplatform_v1beta1/types/hyperparameter_tuning_job.py
index 78af635e79..186963683c 100644
--- a/google/cloud/aiplatform_v1beta1/types/hyperparameter_tuning_job.py
+++ b/google/cloud/aiplatform_v1beta1/types/hyperparameter_tuning_job.py
@@ -26,7 +26,10 @@


 __protobuf__ = proto.module(
-    package="google.cloud.aiplatform.v1beta1", manifest={"HyperparameterTuningJob",},
+    package="google.cloud.aiplatform.v1beta1",
+    manifest={
+        "HyperparameterTuningJob",
+    },
 )


@@ -99,7 +102,11 @@ class HyperparameterTuningJob(proto.Message):

     display_name = proto.Field(proto.STRING, number=2)

-    study_spec = proto.Field(proto.MESSAGE, number=4, message=study.StudySpec,)
+    study_spec = proto.Field(
+        proto.MESSAGE,
+        number=4,
+        message=study.StudySpec,
+    )

     max_trial_count = proto.Field(proto.INT32, number=5)
@@ -108,22 +115,52 @@ class HyperparameterTuningJob(proto.Message):

     max_failed_trial_count = proto.Field(proto.INT32, number=7)

     trial_job_spec = proto.Field(
-        proto.MESSAGE, number=8, message=custom_job.CustomJobSpec,
+        proto.MESSAGE,
+        number=8,
+        message=custom_job.CustomJobSpec,
     )

-    trials = proto.RepeatedField(proto.MESSAGE, number=9, message=study.Trial,)
+    trials = proto.RepeatedField(
+        proto.MESSAGE,
+        number=9,
+        message=study.Trial,
+    )

-    state = proto.Field(proto.ENUM, number=10, enum=job_state.JobState,)
+    state = proto.Field(
+        proto.ENUM,
+        number=10,
+        enum=job_state.JobState,
+    )

-    create_time = proto.Field(proto.MESSAGE, number=11, message=timestamp.Timestamp,)
+    create_time = proto.Field(
+        proto.MESSAGE,
+        number=11,
+        message=timestamp.Timestamp,
+    )

-    start_time = proto.Field(proto.MESSAGE, number=12, message=timestamp.Timestamp,)
+    start_time = proto.Field(
+        proto.MESSAGE,
+        number=12,
+        message=timestamp.Timestamp,
+    )

-    end_time = proto.Field(proto.MESSAGE, number=13, message=timestamp.Timestamp,)
+    end_time = proto.Field(
+        proto.MESSAGE,
+        number=13,
+        message=timestamp.Timestamp,
+    )

-    update_time = proto.Field(proto.MESSAGE, number=14, message=timestamp.Timestamp,)
+    update_time = proto.Field(
+        proto.MESSAGE,
+        number=14,
+        message=timestamp.Timestamp,
+    )

-    error = proto.Field(proto.MESSAGE, number=15, message=status.Status,)
+    error = proto.Field(
+        proto.MESSAGE,
+        number=15,
+        message=status.Status,
+    )

     labels = proto.MapField(proto.STRING, proto.STRING, number=16)
diff --git a/google/cloud/aiplatform_v1beta1/types/job_service.py b/google/cloud/aiplatform_v1beta1/types/job_service.py
index f64f07cbe3..9962e81c40 100644
--- a/google/cloud/aiplatform_v1beta1/types/job_service.py
+++ b/google/cloud/aiplatform_v1beta1/types/job_service.py
@@ -77,7 +77,11 @@ class CreateCustomJobRequest(proto.Message):

     parent = proto.Field(proto.STRING, number=1)

-    custom_job = proto.Field(proto.MESSAGE, number=2, message=gca_custom_job.CustomJob,)
+    custom_job = proto.Field(
+        proto.MESSAGE,
+        number=2,
+        message=gca_custom_job.CustomJob,
+    )


 class GetCustomJobRequest(proto.Message):
@@ -140,7 +144,11 @@ class ListCustomJobsRequest(proto.Message):

     page_token = proto.Field(proto.STRING, number=4)

-    read_mask = proto.Field(proto.MESSAGE, number=5, message=field_mask.FieldMask,)
+    read_mask = proto.Field(
+        proto.MESSAGE,
+        number=5,
+        message=field_mask.FieldMask,
+    )


 class ListCustomJobsResponse(proto.Message):
@@ -161,7 +169,9 @@ def raw_page(self):
         return self

     custom_jobs = proto.RepeatedField(
-        proto.MESSAGE, number=1, message=gca_custom_job.CustomJob,
+        proto.MESSAGE,
+        number=1,
+        message=gca_custom_job.CustomJob,
     )

     next_page_token = proto.Field(proto.STRING, number=2)
@@ -209,7 +219,9 @@ class CreateDataLabelingJobRequest(proto.Message):

     parent = proto.Field(proto.STRING, number=1)

     data_labeling_job = proto.Field(
-        proto.MESSAGE, number=2, message=gca_data_labeling_job.DataLabelingJob,
+        proto.MESSAGE,
+        number=2,
+        message=gca_data_labeling_job.DataLabelingJob,
     )


@@ -275,7 +287,11 @@ class ListDataLabelingJobsRequest(proto.Message):

     page_token = proto.Field(proto.STRING, number=4)

-    read_mask = proto.Field(proto.MESSAGE, number=5, message=field_mask.FieldMask,)
+    read_mask = proto.Field(
+        proto.MESSAGE,
+        number=5,
+        message=field_mask.FieldMask,
+    )

     order_by = proto.Field(proto.STRING, number=6)
@@ -297,7 +313,9 @@ def raw_page(self):
         return self

     data_labeling_jobs = proto.RepeatedField(
-        proto.MESSAGE, number=1, message=gca_data_labeling_job.DataLabelingJob,
+        proto.MESSAGE,
+        number=1,
+        message=gca_data_labeling_job.DataLabelingJob,
     )

     next_page_token = proto.Field(proto.STRING, number=2)
@@ -417,7 +435,11 @@ class ListHyperparameterTuningJobsRequest(proto.Message):

     page_token = proto.Field(proto.STRING, number=4)

-    read_mask = proto.Field(proto.MESSAGE, number=5, message=field_mask.FieldMask,)
+    read_mask = proto.Field(
+        proto.MESSAGE,
+        number=5,
+        message=field_mask.FieldMask,
+    )


 class ListHyperparameterTuningJobsResponse(proto.Message):
@@ -494,7 +516,9 @@ class CreateBatchPredictionJobRequest(proto.Message):

     parent = proto.Field(proto.STRING, number=1)

     batch_prediction_job = proto.Field(
-        proto.MESSAGE, number=2, message=gca_batch_prediction_job.BatchPredictionJob,
+        proto.MESSAGE,
+        number=2,
+        message=gca_batch_prediction_job.BatchPredictionJob,
     )


@@ -560,7 +584,11 @@ class ListBatchPredictionJobsRequest(proto.Message):

     page_token = proto.Field(proto.STRING, number=4)

-    read_mask = proto.Field(proto.MESSAGE, number=5, message=field_mask.FieldMask,)
+    read_mask = proto.Field(
+        proto.MESSAGE,
+        number=5,
+        message=field_mask.FieldMask,
+    )


 class ListBatchPredictionJobsResponse(proto.Message):
@@ -582,7 +610,9 @@ def raw_page(self):
         return self

batch_prediction_jobs = proto.RepeatedField( - proto.MESSAGE, number=1, message=gca_batch_prediction_job.BatchPredictionJob, + proto.MESSAGE, + number=1, + message=gca_batch_prediction_job.BatchPredictionJob, ) next_page_token = proto.Field(proto.STRING, number=2) diff --git a/google/cloud/aiplatform_v1beta1/types/job_state.py b/google/cloud/aiplatform_v1beta1/types/job_state.py index f86e179b1b..2baf9e447d 100644 --- a/google/cloud/aiplatform_v1beta1/types/job_state.py +++ b/google/cloud/aiplatform_v1beta1/types/job_state.py @@ -19,7 +19,10 @@ __protobuf__ = proto.module( - package="google.cloud.aiplatform.v1beta1", manifest={"JobState",}, + package="google.cloud.aiplatform.v1beta1", + manifest={ + "JobState", + }, ) diff --git a/google/cloud/aiplatform_v1beta1/types/machine_resources.py b/google/cloud/aiplatform_v1beta1/types/machine_resources.py index c71aca024e..eefaa7240e 100644 --- a/google/cloud/aiplatform_v1beta1/types/machine_resources.py +++ b/google/cloud/aiplatform_v1beta1/types/machine_resources.py @@ -92,7 +92,9 @@ class MachineSpec(proto.Message): machine_type = proto.Field(proto.STRING, number=1) accelerator_type = proto.Field( - proto.ENUM, number=2, enum=gca_accelerator_type.AcceleratorType, + proto.ENUM, + number=2, + enum=gca_accelerator_type.AcceleratorType, ) accelerator_count = proto.Field(proto.INT32, number=3) @@ -131,7 +133,11 @@ class DedicatedResources(proto.Message): as the default value. """ - machine_spec = proto.Field(proto.MESSAGE, number=1, message="MachineSpec",) + machine_spec = proto.Field( + proto.MESSAGE, + number=1, + message="MachineSpec", + ) min_replica_count = proto.Field(proto.INT32, number=2) @@ -195,7 +201,11 @@ class BatchDedicatedResources(proto.Message): The default value is 10. """ - machine_spec = proto.Field(proto.MESSAGE, number=1, message="MachineSpec",) + machine_spec = proto.Field( + proto.MESSAGE, + number=1, + message="MachineSpec", + ) starting_replica_count = proto.Field(proto.INT32, number=2) diff --git a/google/cloud/aiplatform_v1beta1/types/manual_batch_tuning_parameters.py b/google/cloud/aiplatform_v1beta1/types/manual_batch_tuning_parameters.py index 7a467d5069..849c24b16c 100644 --- a/google/cloud/aiplatform_v1beta1/types/manual_batch_tuning_parameters.py +++ b/google/cloud/aiplatform_v1beta1/types/manual_batch_tuning_parameters.py @@ -20,7 +20,9 @@ __protobuf__ = proto.module( package="google.cloud.aiplatform.v1beta1", - manifest={"ManualBatchTuningParameters",}, + manifest={ + "ManualBatchTuningParameters", + }, ) diff --git a/google/cloud/aiplatform_v1beta1/types/migratable_resource.py b/google/cloud/aiplatform_v1beta1/types/migratable_resource.py index 99a6e65a42..689994f38e 100644 --- a/google/cloud/aiplatform_v1beta1/types/migratable_resource.py +++ b/google/cloud/aiplatform_v1beta1/types/migratable_resource.py @@ -22,7 +22,10 @@ __protobuf__ = proto.module( - package="google.cloud.aiplatform.v1beta1", manifest={"MigratableResource",}, + package="google.cloud.aiplatform.v1beta1", + manifest={ + "MigratableResource", + }, ) @@ -152,27 +155,43 @@ class DataLabelingAnnotatedDataset(proto.Message): ) ml_engine_model_version = proto.Field( - proto.MESSAGE, number=1, oneof="resource", message=MlEngineModelVersion, + proto.MESSAGE, + number=1, + oneof="resource", + message=MlEngineModelVersion, ) automl_model = proto.Field( - proto.MESSAGE, number=2, oneof="resource", message=AutomlModel, + proto.MESSAGE, + number=2, + oneof="resource", + message=AutomlModel, ) automl_dataset = proto.Field( - proto.MESSAGE, number=3, 
oneof="resource", message=AutomlDataset, + proto.MESSAGE, + number=3, + oneof="resource", + message=AutomlDataset, ) data_labeling_dataset = proto.Field( - proto.MESSAGE, number=4, oneof="resource", message=DataLabelingDataset, + proto.MESSAGE, + number=4, + oneof="resource", + message=DataLabelingDataset, ) last_migrate_time = proto.Field( - proto.MESSAGE, number=5, message=timestamp.Timestamp, + proto.MESSAGE, + number=5, + message=timestamp.Timestamp, ) last_update_time = proto.Field( - proto.MESSAGE, number=6, message=timestamp.Timestamp, + proto.MESSAGE, + number=6, + message=timestamp.Timestamp, ) diff --git a/google/cloud/aiplatform_v1beta1/types/migration_service.py b/google/cloud/aiplatform_v1beta1/types/migration_service.py index 46b0cdc66b..cd31a3283c 100644 --- a/google/cloud/aiplatform_v1beta1/types/migration_service.py +++ b/google/cloud/aiplatform_v1beta1/types/migration_service.py @@ -82,7 +82,9 @@ def raw_page(self): return self migratable_resources = proto.RepeatedField( - proto.MESSAGE, number=1, message=gca_migratable_resource.MigratableResource, + proto.MESSAGE, + number=1, + message=gca_migratable_resource.MigratableResource, ) next_page_token = proto.Field(proto.STRING, number=2) @@ -106,7 +108,9 @@ class BatchMigrateResourcesRequest(proto.Message): parent = proto.Field(proto.STRING, number=1) migrate_resource_requests = proto.RepeatedField( - proto.MESSAGE, number=2, message="MigrateResourceRequest", + proto.MESSAGE, + number=2, + message="MigrateResourceRequest", ) @@ -251,11 +255,17 @@ class MigrateDataLabelingAnnotatedDatasetConfig(proto.Message): ) migrate_automl_model_config = proto.Field( - proto.MESSAGE, number=2, oneof="request", message=MigrateAutomlModelConfig, + proto.MESSAGE, + number=2, + oneof="request", + message=MigrateAutomlModelConfig, ) migrate_automl_dataset_config = proto.Field( - proto.MESSAGE, number=3, oneof="request", message=MigrateAutomlDatasetConfig, + proto.MESSAGE, + number=3, + oneof="request", + message=MigrateAutomlDatasetConfig, ) migrate_data_labeling_dataset_config = proto.Field( @@ -276,7 +286,9 @@ class BatchMigrateResourcesResponse(proto.Message): """ migrate_resource_responses = proto.RepeatedField( - proto.MESSAGE, number=1, message="MigrateResourceResponse", + proto.MESSAGE, + number=1, + message="MigrateResourceResponse", ) @@ -299,7 +311,9 @@ class MigrateResourceResponse(proto.Message): model = proto.Field(proto.STRING, number=2, oneof="migrated_resource") migratable_resource = proto.Field( - proto.MESSAGE, number=3, message=gca_migratable_resource.MigratableResource, + proto.MESSAGE, + number=3, + message=gca_migratable_resource.MigratableResource, ) @@ -313,7 +327,9 @@ class BatchMigrateResourcesOperationMetadata(proto.Message): """ generic_metadata = proto.Field( - proto.MESSAGE, number=1, message=operation.GenericOperationMetadata, + proto.MESSAGE, + number=1, + message=operation.GenericOperationMetadata, ) diff --git a/google/cloud/aiplatform_v1beta1/types/model.py b/google/cloud/aiplatform_v1beta1/types/model.py index 21e8c41034..08528748dc 100644 --- a/google/cloud/aiplatform_v1beta1/types/model.py +++ b/google/cloud/aiplatform_v1beta1/types/model.py @@ -27,7 +27,12 @@ __protobuf__ = proto.module( package="google.cloud.aiplatform.v1beta1", - manifest={"Model", "PredictSchemata", "ModelContainerSpec", "Port",}, + manifest={ + "Model", + "PredictSchemata", + "ModelContainerSpec", + "Port", + }, ) @@ -274,7 +279,9 @@ class ExportableContent(proto.Enum): id = proto.Field(proto.STRING, number=1) exportable_contents = 
-            proto.ENUM, number=2, enum="Model.ExportFormat.ExportableContent",
+            proto.ENUM,
+            number=2,
+            enum="Model.ExportFormat.ExportableContent",
         )

     name = proto.Field(proto.STRING, number=1)
@@ -283,40 +290,68 @@ class ExportableContent(proto.Enum):

     description = proto.Field(proto.STRING, number=3)

-    predict_schemata = proto.Field(proto.MESSAGE, number=4, message="PredictSchemata",)
+    predict_schemata = proto.Field(
+        proto.MESSAGE,
+        number=4,
+        message="PredictSchemata",
+    )

     metadata_schema_uri = proto.Field(proto.STRING, number=5)

-    metadata = proto.Field(proto.MESSAGE, number=6, message=struct.Value,)
+    metadata = proto.Field(
+        proto.MESSAGE,
+        number=6,
+        message=struct.Value,
+    )

     supported_export_formats = proto.RepeatedField(
-        proto.MESSAGE, number=20, message=ExportFormat,
+        proto.MESSAGE,
+        number=20,
+        message=ExportFormat,
     )

     training_pipeline = proto.Field(proto.STRING, number=7)

-    container_spec = proto.Field(proto.MESSAGE, number=9, message="ModelContainerSpec",)
+    container_spec = proto.Field(
+        proto.MESSAGE,
+        number=9,
+        message="ModelContainerSpec",
+    )

     artifact_uri = proto.Field(proto.STRING, number=26)

     supported_deployment_resources_types = proto.RepeatedField(
-        proto.ENUM, number=10, enum=DeploymentResourcesType,
+        proto.ENUM,
+        number=10,
+        enum=DeploymentResourcesType,
     )

     supported_input_storage_formats = proto.RepeatedField(proto.STRING, number=11)

     supported_output_storage_formats = proto.RepeatedField(proto.STRING, number=12)

-    create_time = proto.Field(proto.MESSAGE, number=13, message=timestamp.Timestamp,)
+    create_time = proto.Field(
+        proto.MESSAGE,
+        number=13,
+        message=timestamp.Timestamp,
+    )

-    update_time = proto.Field(proto.MESSAGE, number=14, message=timestamp.Timestamp,)
+    update_time = proto.Field(
+        proto.MESSAGE,
+        number=14,
+        message=timestamp.Timestamp,
+    )

     deployed_models = proto.RepeatedField(
-        proto.MESSAGE, number=15, message=deployed_model_ref.DeployedModelRef,
+        proto.MESSAGE,
+        number=15,
+        message=deployed_model_ref.DeployedModelRef,
     )

     explanation_spec = proto.Field(
-        proto.MESSAGE, number=23, message=explanation.ExplanationSpec,
+        proto.MESSAGE,
+        number=23,
+        message=explanation.ExplanationSpec,
     )

     etag = proto.Field(proto.STRING, number=16)
@@ -623,9 +658,17 @@ class ModelContainerSpec(proto.Message):

     args = proto.RepeatedField(proto.STRING, number=3)

-    env = proto.RepeatedField(proto.MESSAGE, number=4, message=env_var.EnvVar,)
+    env = proto.RepeatedField(
+        proto.MESSAGE,
+        number=4,
+        message=env_var.EnvVar,
+    )

-    ports = proto.RepeatedField(proto.MESSAGE, number=5, message="Port",)
+    ports = proto.RepeatedField(
+        proto.MESSAGE,
+        number=5,
+        message="Port",
+    )

     predict_route = proto.Field(proto.STRING, number=6)

diff --git a/google/cloud/aiplatform_v1beta1/types/model_evaluation.py b/google/cloud/aiplatform_v1beta1/types/model_evaluation.py
index b768ed978e..7a55d1e7fc 100644
--- a/google/cloud/aiplatform_v1beta1/types/model_evaluation.py
+++ b/google/cloud/aiplatform_v1beta1/types/model_evaluation.py
@@ -24,7 +24,10 @@


 __protobuf__ = proto.module(
-    package="google.cloud.aiplatform.v1beta1", manifest={"ModelEvaluation",},
+    package="google.cloud.aiplatform.v1beta1",
+    manifest={
+        "ModelEvaluation",
+    },
 )


@@ -71,14 +74,24 @@ class ModelEvaluation(proto.Message):

     metrics_schema_uri = proto.Field(proto.STRING, number=2)

-    metrics = proto.Field(proto.MESSAGE, number=3, message=struct.Value,)
+    metrics = proto.Field(
+        proto.MESSAGE,
+        number=3,
+        message=struct.Value,
+    )

-    create_time = proto.Field(proto.MESSAGE, number=4, message=timestamp.Timestamp,)
+    create_time = proto.Field(
+        proto.MESSAGE,
+        number=4,
+        message=timestamp.Timestamp,
+    )

     slice_dimensions = proto.RepeatedField(proto.STRING, number=5)

     model_explanation = proto.Field(
-        proto.MESSAGE, number=8, message=explanation.ModelExplanation,
+        proto.MESSAGE,
+        number=8,
+        message=explanation.ModelExplanation,
     )

diff --git a/google/cloud/aiplatform_v1beta1/types/model_evaluation_slice.py b/google/cloud/aiplatform_v1beta1/types/model_evaluation_slice.py
index 1039d32c1f..af37ef736c 100644
--- a/google/cloud/aiplatform_v1beta1/types/model_evaluation_slice.py
+++ b/google/cloud/aiplatform_v1beta1/types/model_evaluation_slice.py
@@ -23,7 +23,10 @@


 __protobuf__ = proto.module(
-    package="google.cloud.aiplatform.v1beta1", manifest={"ModelEvaluationSlice",},
+    package="google.cloud.aiplatform.v1beta1",
+    manifest={
+        "ModelEvaluationSlice",
+    },
 )


@@ -79,13 +82,25 @@ class Slice(proto.Message):

     name = proto.Field(proto.STRING, number=1)

-    slice_ = proto.Field(proto.MESSAGE, number=2, message=Slice,)
+    slice_ = proto.Field(
+        proto.MESSAGE,
+        number=2,
+        message=Slice,
+    )

     metrics_schema_uri = proto.Field(proto.STRING, number=3)

-    metrics = proto.Field(proto.MESSAGE, number=4, message=struct.Value,)
-
-    create_time = proto.Field(proto.MESSAGE, number=5, message=timestamp.Timestamp,)
+    metrics = proto.Field(
+        proto.MESSAGE,
+        number=4,
+        message=struct.Value,
+    )
+
+    create_time = proto.Field(
+        proto.MESSAGE,
+        number=5,
+        message=timestamp.Timestamp,
+    )


 __all__ = tuple(sorted(__protobuf__.manifest))
diff --git a/google/cloud/aiplatform_v1beta1/types/model_service.py b/google/cloud/aiplatform_v1beta1/types/model_service.py
index 3cfb17ad2c..4b783e7fa8 100644
--- a/google/cloud/aiplatform_v1beta1/types/model_service.py
+++ b/google/cloud/aiplatform_v1beta1/types/model_service.py
@@ -65,7 +65,11 @@ class UploadModelRequest(proto.Message):

     parent = proto.Field(proto.STRING, number=1)

-    model = proto.Field(proto.MESSAGE, number=2, message=gca_model.Model,)
+    model = proto.Field(
+        proto.MESSAGE,
+        number=2,
+        message=gca_model.Model,
+    )


 class UploadModelOperationMetadata(proto.Message):
@@ -79,7 +83,9 @@ class UploadModelOperationMetadata(proto.Message):
     """

     generic_metadata = proto.Field(
-        proto.MESSAGE, number=1, message=operation.GenericOperationMetadata,
+        proto.MESSAGE,
+        number=1,
+        message=operation.GenericOperationMetadata,
     )


@@ -141,7 +147,11 @@ class ListModelsRequest(proto.Message):

     page_token = proto.Field(proto.STRING, number=4)

-    read_mask = proto.Field(proto.MESSAGE, number=5, message=field_mask.FieldMask,)
+    read_mask = proto.Field(
+        proto.MESSAGE,
+        number=5,
+        message=field_mask.FieldMask,
+    )


 class ListModelsResponse(proto.Message):
@@ -161,7 +171,11 @@ class ListModelsResponse(proto.Message):
     def raw_page(self):
         return self

-    models = proto.RepeatedField(proto.MESSAGE, number=1, message=gca_model.Model,)
+    models = proto.RepeatedField(
+        proto.MESSAGE,
+        number=1,
+        message=gca_model.Model,
+    )

     next_page_token = proto.Field(proto.STRING, number=2)

@@ -181,9 +195,17 @@ class UpdateModelRequest(proto.Message):
            [FieldMask](https://developers.google.com/protocol-buffers/docs/reference/google.protobuf#fieldmask).
""" - model = proto.Field(proto.MESSAGE, number=1, message=gca_model.Model,) + model = proto.Field( + proto.MESSAGE, + number=1, + message=gca_model.Model, + ) - update_mask = proto.Field(proto.MESSAGE, number=2, message=field_mask.FieldMask,) + update_mask = proto.Field( + proto.MESSAGE, + number=2, + message=field_mask.FieldMask, + ) class DeleteModelRequest(proto.Message): @@ -245,16 +267,24 @@ class OutputConfig(proto.Message): export_format_id = proto.Field(proto.STRING, number=1) artifact_destination = proto.Field( - proto.MESSAGE, number=3, message=io.GcsDestination, + proto.MESSAGE, + number=3, + message=io.GcsDestination, ) image_destination = proto.Field( - proto.MESSAGE, number=4, message=io.ContainerRegistryDestination, + proto.MESSAGE, + number=4, + message=io.ContainerRegistryDestination, ) name = proto.Field(proto.STRING, number=1) - output_config = proto.Field(proto.MESSAGE, number=2, message=OutputConfig,) + output_config = proto.Field( + proto.MESSAGE, + number=2, + message=OutputConfig, + ) class ExportModelOperationMetadata(proto.Message): @@ -292,10 +322,16 @@ class OutputInfo(proto.Message): image_output_uri = proto.Field(proto.STRING, number=3) generic_metadata = proto.Field( - proto.MESSAGE, number=1, message=operation.GenericOperationMetadata, + proto.MESSAGE, + number=1, + message=operation.GenericOperationMetadata, ) - output_info = proto.Field(proto.MESSAGE, number=2, message=OutputInfo,) + output_info = proto.Field( + proto.MESSAGE, + number=2, + message=OutputInfo, + ) class ExportModelResponse(proto.Message): @@ -350,7 +386,11 @@ class ListModelEvaluationsRequest(proto.Message): page_token = proto.Field(proto.STRING, number=4) - read_mask = proto.Field(proto.MESSAGE, number=5, message=field_mask.FieldMask,) + read_mask = proto.Field( + proto.MESSAGE, + number=5, + message=field_mask.FieldMask, + ) class ListModelEvaluationsResponse(proto.Message): @@ -372,7 +412,9 @@ def raw_page(self): return self model_evaluations = proto.RepeatedField( - proto.MESSAGE, number=1, message=model_evaluation.ModelEvaluation, + proto.MESSAGE, + number=1, + message=model_evaluation.ModelEvaluation, ) next_page_token = proto.Field(proto.STRING, number=2) @@ -427,7 +469,11 @@ class ListModelEvaluationSlicesRequest(proto.Message): page_token = proto.Field(proto.STRING, number=4) - read_mask = proto.Field(proto.MESSAGE, number=5, message=field_mask.FieldMask,) + read_mask = proto.Field( + proto.MESSAGE, + number=5, + message=field_mask.FieldMask, + ) class ListModelEvaluationSlicesResponse(proto.Message): @@ -449,7 +495,9 @@ def raw_page(self): return self model_evaluation_slices = proto.RepeatedField( - proto.MESSAGE, number=1, message=model_evaluation_slice.ModelEvaluationSlice, + proto.MESSAGE, + number=1, + message=model_evaluation_slice.ModelEvaluationSlice, ) next_page_token = proto.Field(proto.STRING, number=2) diff --git a/google/cloud/aiplatform_v1beta1/types/operation.py b/google/cloud/aiplatform_v1beta1/types/operation.py index 68fb0daead..c9d084cbfa 100644 --- a/google/cloud/aiplatform_v1beta1/types/operation.py +++ b/google/cloud/aiplatform_v1beta1/types/operation.py @@ -24,7 +24,10 @@ __protobuf__ = proto.module( package="google.cloud.aiplatform.v1beta1", - manifest={"GenericOperationMetadata", "DeleteOperationMetadata",}, + manifest={ + "GenericOperationMetadata", + "DeleteOperationMetadata", + }, ) @@ -49,12 +52,22 @@ class GenericOperationMetadata(proto.Message): """ partial_failures = proto.RepeatedField( - proto.MESSAGE, number=1, message=status.Status, + 
+        proto.MESSAGE,
+        number=1,
+        message=status.Status,
     )

-    create_time = proto.Field(proto.MESSAGE, number=2, message=timestamp.Timestamp,)
+    create_time = proto.Field(
+        proto.MESSAGE,
+        number=2,
+        message=timestamp.Timestamp,
+    )

-    update_time = proto.Field(proto.MESSAGE, number=3, message=timestamp.Timestamp,)
+    update_time = proto.Field(
+        proto.MESSAGE,
+        number=3,
+        message=timestamp.Timestamp,
+    )


 class DeleteOperationMetadata(proto.Message):
@@ -66,7 +79,9 @@ class DeleteOperationMetadata(proto.Message):
     """

     generic_metadata = proto.Field(
-        proto.MESSAGE, number=1, message="GenericOperationMetadata",
+        proto.MESSAGE,
+        number=1,
+        message="GenericOperationMetadata",
     )


diff --git a/google/cloud/aiplatform_v1beta1/types/pipeline_service.py b/google/cloud/aiplatform_v1beta1/types/pipeline_service.py
index 9f0856732d..208ed5006a 100644
--- a/google/cloud/aiplatform_v1beta1/types/pipeline_service.py
+++ b/google/cloud/aiplatform_v1beta1/types/pipeline_service.py
@@ -53,7 +53,9 @@ class CreateTrainingPipelineRequest(proto.Message):
     parent = proto.Field(proto.STRING, number=1)

     training_pipeline = proto.Field(
-        proto.MESSAGE, number=2, message=gca_training_pipeline.TrainingPipeline,
+        proto.MESSAGE,
+        number=2,
+        message=gca_training_pipeline.TrainingPipeline,
     )


@@ -116,7 +118,11 @@ class ListTrainingPipelinesRequest(proto.Message):

     page_token = proto.Field(proto.STRING, number=4)

-    read_mask = proto.Field(proto.MESSAGE, number=5, message=field_mask.FieldMask,)
+    read_mask = proto.Field(
+        proto.MESSAGE,
+        number=5,
+        message=field_mask.FieldMask,
+    )


 class ListTrainingPipelinesResponse(proto.Message):
@@ -138,7 +144,9 @@ def raw_page(self):
         return self

     training_pipelines = proto.RepeatedField(
-        proto.MESSAGE, number=1, message=gca_training_pipeline.TrainingPipeline,
+        proto.MESSAGE,
+        number=1,
+        message=gca_training_pipeline.TrainingPipeline,
     )

     next_page_token = proto.Field(proto.STRING, number=2)

diff --git a/google/cloud/aiplatform_v1beta1/types/pipeline_state.py b/google/cloud/aiplatform_v1beta1/types/pipeline_state.py
index cede653bd6..9c52592838 100644
--- a/google/cloud/aiplatform_v1beta1/types/pipeline_state.py
+++ b/google/cloud/aiplatform_v1beta1/types/pipeline_state.py
@@ -19,7 +19,10 @@


 __protobuf__ = proto.module(
-    package="google.cloud.aiplatform.v1beta1", manifest={"PipelineState",},
+    package="google.cloud.aiplatform.v1beta1",
+    manifest={
+        "PipelineState",
+    },
 )


diff --git a/google/cloud/aiplatform_v1beta1/types/prediction_service.py b/google/cloud/aiplatform_v1beta1/types/prediction_service.py
index b000f88bf8..3e5f8d7be8 100644
--- a/google/cloud/aiplatform_v1beta1/types/prediction_service.py
+++ b/google/cloud/aiplatform_v1beta1/types/prediction_service.py
@@ -65,9 +65,17 @@ class PredictRequest(proto.Message):

     endpoint = proto.Field(proto.STRING, number=1)

-    instances = proto.RepeatedField(proto.MESSAGE, number=2, message=struct.Value,)
+    instances = proto.RepeatedField(
+        proto.MESSAGE,
+        number=2,
+        message=struct.Value,
+    )

-    parameters = proto.Field(proto.MESSAGE, number=3, message=struct.Value,)
+    parameters = proto.Field(
+        proto.MESSAGE,
+        number=3,
+        message=struct.Value,
+    )


 class PredictResponse(proto.Message):
@@ -87,7 +95,11 @@ class PredictResponse(proto.Message):
             served this prediction.
""" - predictions = proto.RepeatedField(proto.MESSAGE, number=1, message=struct.Value,) + predictions = proto.RepeatedField( + proto.MESSAGE, + number=1, + message=struct.Value, + ) deployed_model_id = proto.Field(proto.STRING, number=2) @@ -128,9 +140,17 @@ class ExplainRequest(proto.Message): endpoint = proto.Field(proto.STRING, number=1) - instances = proto.RepeatedField(proto.MESSAGE, number=2, message=struct.Value,) + instances = proto.RepeatedField( + proto.MESSAGE, + number=2, + message=struct.Value, + ) - parameters = proto.Field(proto.MESSAGE, number=4, message=struct.Value,) + parameters = proto.Field( + proto.MESSAGE, + number=4, + message=struct.Value, + ) deployed_model_id = proto.Field(proto.STRING, number=3) @@ -157,12 +177,18 @@ class ExplainResponse(proto.Message): """ explanations = proto.RepeatedField( - proto.MESSAGE, number=1, message=explanation.Explanation, + proto.MESSAGE, + number=1, + message=explanation.Explanation, ) deployed_model_id = proto.Field(proto.STRING, number=2) - predictions = proto.RepeatedField(proto.MESSAGE, number=3, message=struct.Value,) + predictions = proto.RepeatedField( + proto.MESSAGE, + number=3, + message=struct.Value, + ) __all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/google/cloud/aiplatform_v1beta1/types/specialist_pool.py b/google/cloud/aiplatform_v1beta1/types/specialist_pool.py index 4ac8c6a709..9b23b5c3c1 100644 --- a/google/cloud/aiplatform_v1beta1/types/specialist_pool.py +++ b/google/cloud/aiplatform_v1beta1/types/specialist_pool.py @@ -19,7 +19,10 @@ __protobuf__ = proto.module( - package="google.cloud.aiplatform.v1beta1", manifest={"SpecialistPool",}, + package="google.cloud.aiplatform.v1beta1", + manifest={ + "SpecialistPool", + }, ) diff --git a/google/cloud/aiplatform_v1beta1/types/specialist_pool_service.py b/google/cloud/aiplatform_v1beta1/types/specialist_pool_service.py index 724f7165a6..811ac554ce 100644 --- a/google/cloud/aiplatform_v1beta1/types/specialist_pool_service.py +++ b/google/cloud/aiplatform_v1beta1/types/specialist_pool_service.py @@ -54,7 +54,9 @@ class CreateSpecialistPoolRequest(proto.Message): parent = proto.Field(proto.STRING, number=1) specialist_pool = proto.Field( - proto.MESSAGE, number=2, message=gca_specialist_pool.SpecialistPool, + proto.MESSAGE, + number=2, + message=gca_specialist_pool.SpecialistPool, ) @@ -68,7 +70,9 @@ class CreateSpecialistPoolOperationMetadata(proto.Message): """ generic_metadata = proto.Field( - proto.MESSAGE, number=1, message=operation.GenericOperationMetadata, + proto.MESSAGE, + number=1, + message=operation.GenericOperationMetadata, ) @@ -114,7 +118,11 @@ class ListSpecialistPoolsRequest(proto.Message): page_token = proto.Field(proto.STRING, number=3) - read_mask = proto.Field(proto.MESSAGE, number=4, message=field_mask.FieldMask,) + read_mask = proto.Field( + proto.MESSAGE, + number=4, + message=field_mask.FieldMask, + ) class ListSpecialistPoolsResponse(proto.Message): @@ -134,7 +142,9 @@ def raw_page(self): return self specialist_pools = proto.RepeatedField( - proto.MESSAGE, number=1, message=gca_specialist_pool.SpecialistPool, + proto.MESSAGE, + number=1, + message=gca_specialist_pool.SpecialistPool, ) next_page_token = proto.Field(proto.STRING, number=2) @@ -175,10 +185,16 @@ class UpdateSpecialistPoolRequest(proto.Message): """ specialist_pool = proto.Field( - proto.MESSAGE, number=1, message=gca_specialist_pool.SpecialistPool, + proto.MESSAGE, + number=1, + message=gca_specialist_pool.SpecialistPool, ) - update_mask = proto.Field(proto.MESSAGE, 
+    update_mask = proto.Field(
+        proto.MESSAGE,
+        number=2,
+        message=field_mask.FieldMask,
+    )


 class UpdateSpecialistPoolOperationMetadata(proto.Message):
@@ -198,7 +214,9 @@ class UpdateSpecialistPoolOperationMetadata(proto.Message):

     specialist_pool = proto.Field(proto.STRING, number=1)

     generic_metadata = proto.Field(
-        proto.MESSAGE, number=2, message=operation.GenericOperationMetadata,
+        proto.MESSAGE,
+        number=2,
+        message=operation.GenericOperationMetadata,
     )


diff --git a/google/cloud/aiplatform_v1beta1/types/study.py b/google/cloud/aiplatform_v1beta1/types/study.py
index 2d6f4ae8c3..06abf97ac1 100644
--- a/google/cloud/aiplatform_v1beta1/types/study.py
+++ b/google/cloud/aiplatform_v1beta1/types/study.py
@@ -24,7 +24,11 @@

 __protobuf__ = proto.module(
     package="google.cloud.aiplatform.v1beta1",
-    manifest={"Trial", "StudySpec", "Measurement",},
+    manifest={
+        "Trial",
+        "StudySpec",
+        "Measurement",
+    },
 )


@@ -82,19 +86,43 @@ class Parameter(proto.Message):

         parameter_id = proto.Field(proto.STRING, number=1)

-        value = proto.Field(proto.MESSAGE, number=2, message=struct.Value,)
+        value = proto.Field(
+            proto.MESSAGE,
+            number=2,
+            message=struct.Value,
+        )

     id = proto.Field(proto.STRING, number=2)

-    state = proto.Field(proto.ENUM, number=3, enum=State,)
-
-    parameters = proto.RepeatedField(proto.MESSAGE, number=4, message=Parameter,)
-
-    final_measurement = proto.Field(proto.MESSAGE, number=5, message="Measurement",)
-
-    start_time = proto.Field(proto.MESSAGE, number=7, message=timestamp.Timestamp,)
-
-    end_time = proto.Field(proto.MESSAGE, number=8, message=timestamp.Timestamp,)
+    state = proto.Field(
+        proto.ENUM,
+        number=3,
+        enum=State,
+    )
+
+    parameters = proto.RepeatedField(
+        proto.MESSAGE,
+        number=4,
+        message=Parameter,
+    )
+
+    final_measurement = proto.Field(
+        proto.MESSAGE,
+        number=5,
+        message="Measurement",
+    )
+
+    start_time = proto.Field(
+        proto.MESSAGE,
+        number=7,
+        message=timestamp.Timestamp,
+    )
+
+    end_time = proto.Field(
+        proto.MESSAGE,
+        number=8,
+        message=timestamp.Timestamp,
+    )

     custom_job = proto.Field(proto.STRING, number=11)

@@ -138,7 +166,11 @@ class GoalType(proto.Enum):

         metric_id = proto.Field(proto.STRING, number=1)

-        goal = proto.Field(proto.ENUM, number=2, enum="StudySpec.MetricSpec.GoalType",)
+        goal = proto.Field(
+            proto.ENUM,
+            number=2,
+            enum="StudySpec.MetricSpec.GoalType",
+        )

     class ParameterSpec(proto.Message):
         r"""Represents a single parameter to optimize.
@@ -313,7 +345,9 @@ class CategoricalValueCondition(proto.Message):
             )

             parameter_spec = proto.Field(
-                proto.MESSAGE, number=1, message="StudySpec.ParameterSpec",
+                proto.MESSAGE,
+                number=1,
+                message="StudySpec.ParameterSpec",
             )

             double_value_spec = proto.Field(
@@ -347,7 +381,9 @@ class CategoricalValueCondition(proto.Message):
         parameter_id = proto.Field(proto.STRING, number=1)

         scale_type = proto.Field(
-            proto.ENUM, number=6, enum="StudySpec.ParameterSpec.ScaleType",
+            proto.ENUM,
+            number=6,
+            enum="StudySpec.ParameterSpec.ScaleType",
         )

         conditional_parameter_specs = proto.RepeatedField(
@@ -356,11 +392,23 @@ class CategoricalValueCondition(proto.Message):
             message="StudySpec.ParameterSpec.ConditionalParameterSpec",
         )

-    metrics = proto.RepeatedField(proto.MESSAGE, number=1, message=MetricSpec,)
+    metrics = proto.RepeatedField(
+        proto.MESSAGE,
+        number=1,
+        message=MetricSpec,
+    )

-    parameters = proto.RepeatedField(proto.MESSAGE, number=2, message=ParameterSpec,)
+    parameters = proto.RepeatedField(
+        proto.MESSAGE,
+        number=2,
+        message=ParameterSpec,
+    )

-    algorithm = proto.Field(proto.ENUM, number=3, enum=Algorithm,)
+    algorithm = proto.Field(
+        proto.ENUM,
+        number=3,
+        enum=Algorithm,
+    )


 class Measurement(proto.Message):
@@ -397,7 +445,11 @@ class Metric(proto.Message):

         step_count = proto.Field(proto.INT64, number=2)

-    metrics = proto.RepeatedField(proto.MESSAGE, number=3, message=Metric,)
+    metrics = proto.RepeatedField(
+        proto.MESSAGE,
+        number=3,
+        message=Metric,
+    )


 __all__ = tuple(sorted(__protobuf__.manifest))
diff --git a/google/cloud/aiplatform_v1beta1/types/training_pipeline.py b/google/cloud/aiplatform_v1beta1/types/training_pipeline.py
index f1f0debaf9..64ef852c5e 100644
--- a/google/cloud/aiplatform_v1beta1/types/training_pipeline.py
+++ b/google/cloud/aiplatform_v1beta1/types/training_pipeline.py
@@ -146,27 +146,67 @@ class TrainingPipeline(proto.Message):

     display_name = proto.Field(proto.STRING, number=2)

-    input_data_config = proto.Field(proto.MESSAGE, number=3, message="InputDataConfig",)
+    input_data_config = proto.Field(
+        proto.MESSAGE,
+        number=3,
+        message="InputDataConfig",
+    )

     training_task_definition = proto.Field(proto.STRING, number=4)

-    training_task_inputs = proto.Field(proto.MESSAGE, number=5, message=struct.Value,)
+    training_task_inputs = proto.Field(
+        proto.MESSAGE,
+        number=5,
+        message=struct.Value,
+    )

-    training_task_metadata = proto.Field(proto.MESSAGE, number=6, message=struct.Value,)
+    training_task_metadata = proto.Field(
+        proto.MESSAGE,
+        number=6,
+        message=struct.Value,
+    )

-    model_to_upload = proto.Field(proto.MESSAGE, number=7, message=model.Model,)
+    model_to_upload = proto.Field(
+        proto.MESSAGE,
+        number=7,
+        message=model.Model,
+    )

-    state = proto.Field(proto.ENUM, number=9, enum=pipeline_state.PipelineState,)
+    state = proto.Field(
+        proto.ENUM,
+        number=9,
+        enum=pipeline_state.PipelineState,
+    )

-    error = proto.Field(proto.MESSAGE, number=10, message=status.Status,)
+    error = proto.Field(
+        proto.MESSAGE,
+        number=10,
+        message=status.Status,
+    )

-    create_time = proto.Field(proto.MESSAGE, number=11, message=timestamp.Timestamp,)
+    create_time = proto.Field(
+        proto.MESSAGE,
+        number=11,
+        message=timestamp.Timestamp,
+    )

-    start_time = proto.Field(proto.MESSAGE, number=12, message=timestamp.Timestamp,)
+    start_time = proto.Field(
+        proto.MESSAGE,
+        number=12,
+        message=timestamp.Timestamp,
+    )

-    end_time = proto.Field(proto.MESSAGE, number=13, message=timestamp.Timestamp,)
+    end_time = proto.Field(
+        proto.MESSAGE,
+        number=13,
+        message=timestamp.Timestamp,
+    )

-    update_time = proto.Field(proto.MESSAGE, number=14, message=timestamp.Timestamp,)
+    update_time = proto.Field(
+        proto.MESSAGE,
+        number=14,
+        message=timestamp.Timestamp,
+    )

     labels = proto.MapField(proto.STRING, proto.STRING, number=15)

@@ -287,27 +327,45 @@ class InputDataConfig(proto.Message):
     """

     fraction_split = proto.Field(
-        proto.MESSAGE, number=2, oneof="split", message="FractionSplit",
+        proto.MESSAGE,
+        number=2,
+        oneof="split",
+        message="FractionSplit",
     )

     filter_split = proto.Field(
-        proto.MESSAGE, number=3, oneof="split", message="FilterSplit",
+        proto.MESSAGE,
+        number=3,
+        oneof="split",
+        message="FilterSplit",
     )

     predefined_split = proto.Field(
-        proto.MESSAGE, number=4, oneof="split", message="PredefinedSplit",
+        proto.MESSAGE,
+        number=4,
+        oneof="split",
+        message="PredefinedSplit",
     )

     timestamp_split = proto.Field(
-        proto.MESSAGE, number=5, oneof="split", message="TimestampSplit",
+        proto.MESSAGE,
+        number=5,
+        oneof="split",
+        message="TimestampSplit",
     )

     gcs_destination = proto.Field(
-        proto.MESSAGE, number=8, oneof="destination", message=io.GcsDestination,
+        proto.MESSAGE,
+        number=8,
+        oneof="destination",
+        message=io.GcsDestination,
     )

     bigquery_destination = proto.Field(
-        proto.MESSAGE, number=10, oneof="destination", message=io.BigQueryDestination,
+        proto.MESSAGE,
+        number=10,
+        oneof="destination",
+        message=io.BigQueryDestination,
     )

     dataset_id = proto.Field(proto.STRING, number=1)

diff --git a/google/cloud/aiplatform_v1beta1/types/user_action_reference.py b/google/cloud/aiplatform_v1beta1/types/user_action_reference.py
index 710e4a6d16..742ba69127 100644
--- a/google/cloud/aiplatform_v1beta1/types/user_action_reference.py
+++ b/google/cloud/aiplatform_v1beta1/types/user_action_reference.py
@@ -19,7 +19,10 @@


 __protobuf__ = proto.module(
-    package="google.cloud.aiplatform.v1beta1", manifest={"UserActionReference",},
+    package="google.cloud.aiplatform.v1beta1",
+    manifest={
+        "UserActionReference",
+    },
 )


diff --git a/tests/unit/gapic/aiplatform_v1beta1/test_dataset_service.py b/tests/unit/gapic/aiplatform_v1beta1/test_dataset_service.py
index 51022d9fb7..411933eca6 100644
--- a/tests/unit/gapic/aiplatform_v1beta1/test_dataset_service.py
+++ b/tests/unit/gapic/aiplatform_v1beta1/test_dataset_service.py
@@ -391,7 +391,9 @@ def test_dataset_service_client_client_options_scopes(
     client_class, transport_class, transport_name
 ):
     # Check the case scopes are provided.
-    options = client_options.ClientOptions(scopes=["1", "2"],)
+    options = client_options.ClientOptions(
+        scopes=["1", "2"],
+    )
     with mock.patch.object(transport_class, "__init__") as patched:
         patched.return_value = None
         client = client_class(client_options=options)
@@ -459,7 +461,8 @@ def test_create_dataset(
     transport: str = "grpc", request_type=dataset_service.CreateDatasetRequest
 ):
     client = DatasetServiceClient(
-        credentials=credentials.AnonymousCredentials(), transport=transport,
+        credentials=credentials.AnonymousCredentials(),
+        transport=transport,
     )

     # Everything is optional in proto3 as far as the runtime is concerned,
@@ -492,7 +495,8 @@ async def test_create_dataset_async(
     transport: str = "grpc_asyncio", request_type=dataset_service.CreateDatasetRequest
 ):
     client = DatasetServiceAsyncClient(
-        credentials=credentials.AnonymousCredentials(), transport=transport,
+        credentials=credentials.AnonymousCredentials(),
+        transport=transport,
     )

     # Everything is optional in proto3 as far as the runtime is concerned,
@@ -524,7 +528,9 @@ async def test_create_dataset_async_from_dict():


 def test_create_dataset_field_headers():
-    client = DatasetServiceClient(credentials=credentials.AnonymousCredentials(),)
+    client = DatasetServiceClient(
+        credentials=credentials.AnonymousCredentials(),
+    )

     # Any value that is part of the HTTP/1.1 URI should be sent as
     # a field header. Set these to a non-empty value.
@@ -544,12 +550,17 @@ def test_create_dataset_field_headers():

     # Establish that the field header was sent.
     _, _, kw = call.mock_calls[0]
-    assert ("x-goog-request-params", "parent=parent/value",) in kw["metadata"]
+    assert (
+        "x-goog-request-params",
+        "parent=parent/value",
+    ) in kw["metadata"]


 @pytest.mark.asyncio
 async def test_create_dataset_field_headers_async():
-    client = DatasetServiceAsyncClient(credentials=credentials.AnonymousCredentials(),)
+    client = DatasetServiceAsyncClient(
+        credentials=credentials.AnonymousCredentials(),
+    )

     # Any value that is part of the HTTP/1.1 URI should be sent as
     # a field header. Set these to a non-empty value.
@@ -571,11 +582,16 @@ async def test_create_dataset_field_headers_async():

     # Establish that the field header was sent.
     _, _, kw = call.mock_calls[0]
-    assert ("x-goog-request-params", "parent=parent/value",) in kw["metadata"]
+    assert (
+        "x-goog-request-params",
+        "parent=parent/value",
+    ) in kw["metadata"]


 def test_create_dataset_flattened():
-    client = DatasetServiceClient(credentials=credentials.AnonymousCredentials(),)
+    client = DatasetServiceClient(
+        credentials=credentials.AnonymousCredentials(),
+    )

     # Mock the actual call within the gRPC stub, and fake the request.
     with mock.patch.object(type(client.transport.create_dataset), "__call__") as call:
@@ -585,7 +601,8 @@ def test_create_dataset_flattened():

         # Call the method with a truthy value for each flattened field,
         # using the keyword arguments to the method.
         client.create_dataset(
-            parent="parent_value", dataset=gca_dataset.Dataset(name="name_value"),
+            parent="parent_value",
+            dataset=gca_dataset.Dataset(name="name_value"),
         )

         # Establish that the underlying call was made with the expected
@@ -599,7 +616,9 @@ def test_create_dataset_flattened():


 def test_create_dataset_flattened_error():
-    client = DatasetServiceClient(credentials=credentials.AnonymousCredentials(),)
+    client = DatasetServiceClient(
+        credentials=credentials.AnonymousCredentials(),
+    )

     # Attempting to call a method with both a request object and flattened
     # fields is an error.
@@ -613,7 +632,9 @@ def test_create_dataset_flattened_error():

 @pytest.mark.asyncio
 async def test_create_dataset_flattened_async():
-    client = DatasetServiceAsyncClient(credentials=credentials.AnonymousCredentials(),)
+    client = DatasetServiceAsyncClient(
+        credentials=credentials.AnonymousCredentials(),
+    )

     # Mock the actual call within the gRPC stub, and fake the request.
     with mock.patch.object(type(client.transport.create_dataset), "__call__") as call:
@@ -626,7 +647,8 @@ async def test_create_dataset_flattened_async():
         # Call the method with a truthy value for each flattened field,
         # using the keyword arguments to the method.
         response = await client.create_dataset(
-            parent="parent_value", dataset=gca_dataset.Dataset(name="name_value"),
+            parent="parent_value",
+            dataset=gca_dataset.Dataset(name="name_value"),
         )

         # Establish that the underlying call was made with the expected
@@ -641,7 +663,9 @@ async def test_create_dataset_flattened_async():

 @pytest.mark.asyncio
 async def test_create_dataset_flattened_error_async():
-    client = DatasetServiceAsyncClient(credentials=credentials.AnonymousCredentials(),)
+    client = DatasetServiceAsyncClient(
+        credentials=credentials.AnonymousCredentials(),
+    )

     # Attempting to call a method with both a request object and flattened
     # fields is an error.
@@ -657,7 +681,8 @@ def test_get_dataset(
     transport: str = "grpc", request_type=dataset_service.GetDatasetRequest
 ):
     client = DatasetServiceClient(
-        credentials=credentials.AnonymousCredentials(), transport=transport,
+        credentials=credentials.AnonymousCredentials(),
+        transport=transport,
     )

     # Everything is optional in proto3 as far as the runtime is concerned,
@@ -704,7 +729,8 @@ async def test_get_dataset_async(
     transport: str = "grpc_asyncio", request_type=dataset_service.GetDatasetRequest
 ):
     client = DatasetServiceAsyncClient(
-        credentials=credentials.AnonymousCredentials(), transport=transport,
+        credentials=credentials.AnonymousCredentials(),
+        transport=transport,
     )

     # Everything is optional in proto3 as far as the runtime is concerned,
@@ -749,7 +775,9 @@ async def test_get_dataset_async_from_dict():


 def test_get_dataset_field_headers():
-    client = DatasetServiceClient(credentials=credentials.AnonymousCredentials(),)
+    client = DatasetServiceClient(
+        credentials=credentials.AnonymousCredentials(),
+    )

     # Any value that is part of the HTTP/1.1 URI should be sent as
     # a field header. Set these to a non-empty value.
@@ -769,12 +797,17 @@ def test_get_dataset_field_headers():

     # Establish that the field header was sent.
     _, _, kw = call.mock_calls[0]
-    assert ("x-goog-request-params", "name=name/value",) in kw["metadata"]
+    assert (
+        "x-goog-request-params",
+        "name=name/value",
+    ) in kw["metadata"]


 @pytest.mark.asyncio
 async def test_get_dataset_field_headers_async():
-    client = DatasetServiceAsyncClient(credentials=credentials.AnonymousCredentials(),)
+    client = DatasetServiceAsyncClient(
+        credentials=credentials.AnonymousCredentials(),
+    )

     # Any value that is part of the HTTP/1.1 URI should be sent as
     # a field header. Set these to a non-empty value.
@@ -794,11 +827,16 @@ async def test_get_dataset_field_headers_async():

     # Establish that the field header was sent.
     _, _, kw = call.mock_calls[0]
-    assert ("x-goog-request-params", "name=name/value",) in kw["metadata"]
+    assert (
+        "x-goog-request-params",
+        "name=name/value",
+    ) in kw["metadata"]


 def test_get_dataset_flattened():
-    client = DatasetServiceClient(credentials=credentials.AnonymousCredentials(),)
+    client = DatasetServiceClient(
+        credentials=credentials.AnonymousCredentials(),
+    )

     # Mock the actual call within the gRPC stub, and fake the request.
     with mock.patch.object(type(client.transport.get_dataset), "__call__") as call:
@@ -807,7 +845,9 @@ def test_get_dataset_flattened():

         # Call the method with a truthy value for each flattened field,
         # using the keyword arguments to the method.
-        client.get_dataset(name="name_value",)
+        client.get_dataset(
+            name="name_value",
+        )

         # Establish that the underlying call was made with the expected
         # request object values.
@@ -818,19 +858,24 @@ def test_get_dataset_flattened():


 def test_get_dataset_flattened_error():
-    client = DatasetServiceClient(credentials=credentials.AnonymousCredentials(),)
+    client = DatasetServiceClient(
+        credentials=credentials.AnonymousCredentials(),
+    )

     # Attempting to call a method with both a request object and flattened
     # fields is an error.
     with pytest.raises(ValueError):
         client.get_dataset(
-            dataset_service.GetDatasetRequest(), name="name_value",
+            dataset_service.GetDatasetRequest(),
+            name="name_value",
         )


 @pytest.mark.asyncio
 async def test_get_dataset_flattened_async():
-    client = DatasetServiceAsyncClient(credentials=credentials.AnonymousCredentials(),)
+    client = DatasetServiceAsyncClient(
+        credentials=credentials.AnonymousCredentials(),
+    )

     # Mock the actual call within the gRPC stub, and fake the request.
     with mock.patch.object(type(client.transport.get_dataset), "__call__") as call:
@@ -840,7 +885,9 @@ async def test_get_dataset_flattened_async():
         call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dataset.Dataset())
         # Call the method with a truthy value for each flattened field,
         # using the keyword arguments to the method.
-        response = await client.get_dataset(name="name_value",)
+        response = await client.get_dataset(
+            name="name_value",
+        )

         # Establish that the underlying call was made with the expected
         # request object values.
@@ -852,13 +899,16 @@ async def test_get_dataset_flattened_async():

 @pytest.mark.asyncio
 async def test_get_dataset_flattened_error_async():
-    client = DatasetServiceAsyncClient(credentials=credentials.AnonymousCredentials(),)
+    client = DatasetServiceAsyncClient(
+        credentials=credentials.AnonymousCredentials(),
+    )

     # Attempting to call a method with both a request object and flattened
     # fields is an error.
     with pytest.raises(ValueError):
         await client.get_dataset(
-            dataset_service.GetDatasetRequest(), name="name_value",
+            dataset_service.GetDatasetRequest(),
+            name="name_value",
         )


@@ -866,7 +916,8 @@ def test_update_dataset(
     transport: str = "grpc", request_type=dataset_service.UpdateDatasetRequest
 ):
     client = DatasetServiceClient(
-        credentials=credentials.AnonymousCredentials(), transport=transport,
+        credentials=credentials.AnonymousCredentials(),
+        transport=transport,
     )

     # Everything is optional in proto3 as far as the runtime is concerned,
@@ -913,7 +964,8 @@ async def test_update_dataset_async(
     transport: str = "grpc_asyncio", request_type=dataset_service.UpdateDatasetRequest
 ):
     client = DatasetServiceAsyncClient(
-        credentials=credentials.AnonymousCredentials(), transport=transport,
+        credentials=credentials.AnonymousCredentials(),
+        transport=transport,
     )

     # Everything is optional in proto3 as far as the runtime is concerned,
@@ -958,7 +1010,9 @@ async def test_update_dataset_async_from_dict():


 def test_update_dataset_field_headers():
-    client = DatasetServiceClient(credentials=credentials.AnonymousCredentials(),)
+    client = DatasetServiceClient(
+        credentials=credentials.AnonymousCredentials(),
+    )

     # Any value that is part of the HTTP/1.1 URI should be sent as
     # a field header. Set these to a non-empty value.
@@ -978,14 +1032,17 @@ def test_update_dataset_field_headers():

     # Establish that the field header was sent.
     _, _, kw = call.mock_calls[0]
-    assert ("x-goog-request-params", "dataset.name=dataset.name/value",) in kw[
-        "metadata"
-    ]
+    assert (
+        "x-goog-request-params",
+        "dataset.name=dataset.name/value",
+    ) in kw["metadata"]


 @pytest.mark.asyncio
 async def test_update_dataset_field_headers_async():
-    client = DatasetServiceAsyncClient(credentials=credentials.AnonymousCredentials(),)
+    client = DatasetServiceAsyncClient(
+        credentials=credentials.AnonymousCredentials(),
+    )

     # Any value that is part of the HTTP/1.1 URI should be sent as
     # a field header. Set these to a non-empty value.
@@ -1005,13 +1062,16 @@ async def test_update_dataset_field_headers_async():

     # Establish that the field header was sent.
     _, _, kw = call.mock_calls[0]
-    assert ("x-goog-request-params", "dataset.name=dataset.name/value",) in kw[
-        "metadata"
-    ]
+    assert (
+        "x-goog-request-params",
+        "dataset.name=dataset.name/value",
+    ) in kw["metadata"]


 def test_update_dataset_flattened():
-    client = DatasetServiceClient(credentials=credentials.AnonymousCredentials(),)
+    client = DatasetServiceClient(
+        credentials=credentials.AnonymousCredentials(),
+    )

     # Mock the actual call within the gRPC stub, and fake the request.
     with mock.patch.object(type(client.transport.update_dataset), "__call__") as call:
@@ -1036,7 +1096,9 @@ def test_update_dataset_flattened():


 def test_update_dataset_flattened_error():
-    client = DatasetServiceClient(credentials=credentials.AnonymousCredentials(),)
+    client = DatasetServiceClient(
+        credentials=credentials.AnonymousCredentials(),
+    )

     # Attempting to call a method with both a request object and flattened
     # fields is an error.
@@ -1050,7 +1112,9 @@ def test_update_dataset_flattened_error():

 @pytest.mark.asyncio
 async def test_update_dataset_flattened_async():
-    client = DatasetServiceAsyncClient(credentials=credentials.AnonymousCredentials(),)
+    client = DatasetServiceAsyncClient(
+        credentials=credentials.AnonymousCredentials(),
+    )

     # Mock the actual call within the gRPC stub, and fake the request.
     with mock.patch.object(type(client.transport.update_dataset), "__call__") as call:
@@ -1077,7 +1141,9 @@ async def test_update_dataset_flattened_async():

 @pytest.mark.asyncio
 async def test_update_dataset_flattened_error_async():
-    client = DatasetServiceAsyncClient(credentials=credentials.AnonymousCredentials(),)
+    client = DatasetServiceAsyncClient(
+        credentials=credentials.AnonymousCredentials(),
+    )

     # Attempting to call a method with both a request object and flattened
     # fields is an error.
@@ -1093,7 +1159,8 @@ def test_list_datasets(
     transport: str = "grpc", request_type=dataset_service.ListDatasetsRequest
 ):
     client = DatasetServiceClient(
-        credentials=credentials.AnonymousCredentials(), transport=transport,
+        credentials=credentials.AnonymousCredentials(),
+        transport=transport,
     )

     # Everything is optional in proto3 as far as the runtime is concerned,
@@ -1131,7 +1198,8 @@ async def test_list_datasets_async(
     transport: str = "grpc_asyncio", request_type=dataset_service.ListDatasetsRequest
 ):
     client = DatasetServiceAsyncClient(
-        credentials=credentials.AnonymousCredentials(), transport=transport,
+        credentials=credentials.AnonymousCredentials(),
+        transport=transport,
     )

     # Everything is optional in proto3 as far as the runtime is concerned,
@@ -1167,7 +1235,9 @@ async def test_list_datasets_async_from_dict():


 def test_list_datasets_field_headers():
-    client = DatasetServiceClient(credentials=credentials.AnonymousCredentials(),)
+    client = DatasetServiceClient(
+        credentials=credentials.AnonymousCredentials(),
+    )

     # Any value that is part of the HTTP/1.1 URI should be sent as
     # a field header. Set these to a non-empty value.
@@ -1187,12 +1257,17 @@ def test_list_datasets_field_headers():

     # Establish that the field header was sent.
     _, _, kw = call.mock_calls[0]
-    assert ("x-goog-request-params", "parent=parent/value",) in kw["metadata"]
+    assert (
+        "x-goog-request-params",
+        "parent=parent/value",
+    ) in kw["metadata"]


 @pytest.mark.asyncio
 async def test_list_datasets_field_headers_async():
-    client = DatasetServiceAsyncClient(credentials=credentials.AnonymousCredentials(),)
+    client = DatasetServiceAsyncClient(
+        credentials=credentials.AnonymousCredentials(),
+    )

     # Any value that is part of the HTTP/1.1 URI should be sent as
     # a field header. Set these to a non-empty value.
@@ -1214,11 +1289,16 @@ async def test_list_datasets_field_headers_async():

     # Establish that the field header was sent.
     _, _, kw = call.mock_calls[0]
-    assert ("x-goog-request-params", "parent=parent/value",) in kw["metadata"]
+    assert (
+        "x-goog-request-params",
+        "parent=parent/value",
+    ) in kw["metadata"]


 def test_list_datasets_flattened():
-    client = DatasetServiceClient(credentials=credentials.AnonymousCredentials(),)
+    client = DatasetServiceClient(
+        credentials=credentials.AnonymousCredentials(),
+    )

     # Mock the actual call within the gRPC stub, and fake the request.
     with mock.patch.object(type(client.transport.list_datasets), "__call__") as call:
@@ -1227,7 +1307,9 @@ def test_list_datasets_flattened():

         # Call the method with a truthy value for each flattened field,
         # using the keyword arguments to the method.
-        client.list_datasets(parent="parent_value",)
+        client.list_datasets(
+            parent="parent_value",
+        )

         # Establish that the underlying call was made with the expected
         # request object values.
@@ -1238,19 +1320,24 @@ def test_list_datasets_flattened():


 def test_list_datasets_flattened_error():
-    client = DatasetServiceClient(credentials=credentials.AnonymousCredentials(),)
+    client = DatasetServiceClient(
+        credentials=credentials.AnonymousCredentials(),
+    )

     # Attempting to call a method with both a request object and flattened
     # fields is an error.
     with pytest.raises(ValueError):
         client.list_datasets(
-            dataset_service.ListDatasetsRequest(), parent="parent_value",
+            dataset_service.ListDatasetsRequest(),
+            parent="parent_value",
         )


 @pytest.mark.asyncio
 async def test_list_datasets_flattened_async():
-    client = DatasetServiceAsyncClient(credentials=credentials.AnonymousCredentials(),)
+    client = DatasetServiceAsyncClient(
+        credentials=credentials.AnonymousCredentials(),
+    )

     # Mock the actual call within the gRPC stub, and fake the request.
     with mock.patch.object(type(client.transport.list_datasets), "__call__") as call:
@@ -1262,7 +1349,9 @@ async def test_list_datasets_flattened_async():
         )
         # Call the method with a truthy value for each flattened field,
         # using the keyword arguments to the method.
-        response = await client.list_datasets(parent="parent_value",)
+        response = await client.list_datasets(
+            parent="parent_value",
+        )

         # Establish that the underlying call was made with the expected
         # request object values.
@@ -1274,33 +1363,51 @@ async def test_list_datasets_flattened_async():

 @pytest.mark.asyncio
 async def test_list_datasets_flattened_error_async():
-    client = DatasetServiceAsyncClient(credentials=credentials.AnonymousCredentials(),)
+    client = DatasetServiceAsyncClient(
+        credentials=credentials.AnonymousCredentials(),
+    )

     # Attempting to call a method with both a request object and flattened
     # fields is an error.
     with pytest.raises(ValueError):
         await client.list_datasets(
-            dataset_service.ListDatasetsRequest(), parent="parent_value",
+            dataset_service.ListDatasetsRequest(),
+            parent="parent_value",
         )


 def test_list_datasets_pager():
-    client = DatasetServiceClient(credentials=credentials.AnonymousCredentials,)
+    client = DatasetServiceClient(
+        credentials=credentials.AnonymousCredentials,
+    )

     # Mock the actual call within the gRPC stub, and fake the request.
     with mock.patch.object(type(client.transport.list_datasets), "__call__") as call:
         # Set the response to a series of pages.
         call.side_effect = (
             dataset_service.ListDatasetsResponse(
-                datasets=[dataset.Dataset(), dataset.Dataset(), dataset.Dataset(),],
+                datasets=[
+                    dataset.Dataset(),
+                    dataset.Dataset(),
+                    dataset.Dataset(),
+                ],
                 next_page_token="abc",
             ),
-            dataset_service.ListDatasetsResponse(datasets=[], next_page_token="def",),
             dataset_service.ListDatasetsResponse(
-                datasets=[dataset.Dataset(),], next_page_token="ghi",
+                datasets=[],
+                next_page_token="def",
+            ),
+            dataset_service.ListDatasetsResponse(
+                datasets=[
+                    dataset.Dataset(),
+                ],
+                next_page_token="ghi",
             ),
             dataset_service.ListDatasetsResponse(
-                datasets=[dataset.Dataset(), dataset.Dataset(),],
+                datasets=[
+                    dataset.Dataset(),
+                    dataset.Dataset(),
+                ],
             ),
             RuntimeError,
         )
@@ -1319,22 +1426,37 @@ def test_list_datasets_pager():


 def test_list_datasets_pages():
-    client = DatasetServiceClient(credentials=credentials.AnonymousCredentials,)
+    client = DatasetServiceClient(
+        credentials=credentials.AnonymousCredentials,
+    )

     # Mock the actual call within the gRPC stub, and fake the request.
     with mock.patch.object(type(client.transport.list_datasets), "__call__") as call:
         # Set the response to a series of pages.
         call.side_effect = (
             dataset_service.ListDatasetsResponse(
-                datasets=[dataset.Dataset(), dataset.Dataset(), dataset.Dataset(),],
+                datasets=[
+                    dataset.Dataset(),
+                    dataset.Dataset(),
+                    dataset.Dataset(),
+                ],
                 next_page_token="abc",
             ),
-            dataset_service.ListDatasetsResponse(datasets=[], next_page_token="def",),
             dataset_service.ListDatasetsResponse(
-                datasets=[dataset.Dataset(),], next_page_token="ghi",
+                datasets=[],
+                next_page_token="def",
             ),
             dataset_service.ListDatasetsResponse(
-                datasets=[dataset.Dataset(), dataset.Dataset(),],
+                datasets=[
+                    dataset.Dataset(),
+                ],
+                next_page_token="ghi",
+            ),
+            dataset_service.ListDatasetsResponse(
+                datasets=[
+                    dataset.Dataset(),
+                    dataset.Dataset(),
+                ],
             ),
             RuntimeError,
         )
@@ -1345,7 +1467,9 @@ def test_list_datasets_pages():

 @pytest.mark.asyncio
 async def test_list_datasets_async_pager():
-    client = DatasetServiceAsyncClient(credentials=credentials.AnonymousCredentials,)
+    client = DatasetServiceAsyncClient(
+        credentials=credentials.AnonymousCredentials,
+    )

     # Mock the actual call within the gRPC stub, and fake the request.
     with mock.patch.object(
@@ -1354,19 +1478,34 @@ async def test_list_datasets_async_pager():
         # Set the response to a series of pages.
         call.side_effect = (
             dataset_service.ListDatasetsResponse(
-                datasets=[dataset.Dataset(), dataset.Dataset(), dataset.Dataset(),],
+                datasets=[
+                    dataset.Dataset(),
+                    dataset.Dataset(),
+                    dataset.Dataset(),
+                ],
                 next_page_token="abc",
             ),
-            dataset_service.ListDatasetsResponse(datasets=[], next_page_token="def",),
             dataset_service.ListDatasetsResponse(
-                datasets=[dataset.Dataset(),], next_page_token="ghi",
+                datasets=[],
+                next_page_token="def",
+            ),
+            dataset_service.ListDatasetsResponse(
+                datasets=[
+                    dataset.Dataset(),
+                ],
+                next_page_token="ghi",
             ),
             dataset_service.ListDatasetsResponse(
-                datasets=[dataset.Dataset(), dataset.Dataset(),],
+                datasets=[
+                    dataset.Dataset(),
+                    dataset.Dataset(),
+                ],
             ),
             RuntimeError,
         )
-        async_pager = await client.list_datasets(request={},)
+        async_pager = await client.list_datasets(
+            request={},
+        )
         assert async_pager.next_page_token == "abc"
         responses = []
         async for response in async_pager:
             responses.append(response)
@@ -1378,7 +1517,9 @@ async def test_list_datasets_async_pager():

 @pytest.mark.asyncio
 async def test_list_datasets_async_pages():
-    client = DatasetServiceAsyncClient(credentials=credentials.AnonymousCredentials,)
+    client = DatasetServiceAsyncClient(
+        credentials=credentials.AnonymousCredentials,
+    )

     # Mock the actual call within the gRPC stub, and fake the request.
     with mock.patch.object(
@@ -1387,15 +1528,28 @@ async def test_list_datasets_async_pages():
         # Set the response to a series of pages.
         call.side_effect = (
             dataset_service.ListDatasetsResponse(
-                datasets=[dataset.Dataset(), dataset.Dataset(), dataset.Dataset(),],
+                datasets=[
+                    dataset.Dataset(),
+                    dataset.Dataset(),
+                    dataset.Dataset(),
+                ],
                 next_page_token="abc",
             ),
-            dataset_service.ListDatasetsResponse(datasets=[], next_page_token="def",),
             dataset_service.ListDatasetsResponse(
-                datasets=[dataset.Dataset(),], next_page_token="ghi",
+                datasets=[],
+                next_page_token="def",
             ),
             dataset_service.ListDatasetsResponse(
-                datasets=[dataset.Dataset(), dataset.Dataset(),],
+                datasets=[
+                    dataset.Dataset(),
+                ],
+                next_page_token="ghi",
+            ),
+            dataset_service.ListDatasetsResponse(
+                datasets=[
+                    dataset.Dataset(),
+                    dataset.Dataset(),
+                ],
             ),
             RuntimeError,
         )
@@ -1410,7 +1564,8 @@ def test_delete_dataset(
     transport: str = "grpc", request_type=dataset_service.DeleteDatasetRequest
 ):
     client = DatasetServiceClient(
-        credentials=credentials.AnonymousCredentials(), transport=transport,
+        credentials=credentials.AnonymousCredentials(),
+        transport=transport,
     )

     # Everything is optional in proto3 as far as the runtime is concerned,
@@ -1443,7 +1598,8 @@ async def test_delete_dataset_async(
     transport: str = "grpc_asyncio", request_type=dataset_service.DeleteDatasetRequest
 ):
     client = DatasetServiceAsyncClient(
-        credentials=credentials.AnonymousCredentials(), transport=transport,
+        credentials=credentials.AnonymousCredentials(),
+        transport=transport,
     )

     # Everything is optional in proto3 as far as the runtime is concerned,
@@ -1475,7 +1631,9 @@ async def test_delete_dataset_async_from_dict():


 def test_delete_dataset_field_headers():
-    client = DatasetServiceClient(credentials=credentials.AnonymousCredentials(),)
+    client = DatasetServiceClient(
+        credentials=credentials.AnonymousCredentials(),
+    )

     # Any value that is part of the HTTP/1.1 URI should be sent as
     # a field header. Set these to a non-empty value.
@@ -1495,12 +1653,17 @@ def test_delete_dataset_field_headers():

     # Establish that the field header was sent.
     _, _, kw = call.mock_calls[0]
-    assert ("x-goog-request-params", "name=name/value",) in kw["metadata"]
+    assert (
+        "x-goog-request-params",
+        "name=name/value",
+    ) in kw["metadata"]


 @pytest.mark.asyncio
 async def test_delete_dataset_field_headers_async():
-    client = DatasetServiceAsyncClient(credentials=credentials.AnonymousCredentials(),)
+    client = DatasetServiceAsyncClient(
+        credentials=credentials.AnonymousCredentials(),
+    )

     # Any value that is part of the HTTP/1.1 URI should be sent as
     # a field header. Set these to a non-empty value.
@@ -1522,11 +1685,16 @@ async def test_delete_dataset_field_headers_async():

     # Establish that the field header was sent.
     _, _, kw = call.mock_calls[0]
-    assert ("x-goog-request-params", "name=name/value",) in kw["metadata"]
+    assert (
+        "x-goog-request-params",
+        "name=name/value",
+    ) in kw["metadata"]


 def test_delete_dataset_flattened():
-    client = DatasetServiceClient(credentials=credentials.AnonymousCredentials(),)
+    client = DatasetServiceClient(
+        credentials=credentials.AnonymousCredentials(),
+    )

     # Mock the actual call within the gRPC stub, and fake the request.
     with mock.patch.object(type(client.transport.delete_dataset), "__call__") as call:
@@ -1535,7 +1703,9 @@ def test_delete_dataset_flattened():

         # Call the method with a truthy value for each flattened field,
         # using the keyword arguments to the method.
-        client.delete_dataset(name="name_value",)
+        client.delete_dataset(
+            name="name_value",
+        )

         # Establish that the underlying call was made with the expected
         # request object values.
@@ -1546,19 +1716,24 @@ def test_delete_dataset_flattened():


 def test_delete_dataset_flattened_error():
-    client = DatasetServiceClient(credentials=credentials.AnonymousCredentials(),)
+    client = DatasetServiceClient(
+        credentials=credentials.AnonymousCredentials(),
+    )

     # Attempting to call a method with both a request object and flattened
     # fields is an error.
     with pytest.raises(ValueError):
         client.delete_dataset(
-            dataset_service.DeleteDatasetRequest(), name="name_value",
+            dataset_service.DeleteDatasetRequest(),
+            name="name_value",
         )


 @pytest.mark.asyncio
 async def test_delete_dataset_flattened_async():
-    client = DatasetServiceAsyncClient(credentials=credentials.AnonymousCredentials(),)
+    client = DatasetServiceAsyncClient(
+        credentials=credentials.AnonymousCredentials(),
+    )

     # Mock the actual call within the gRPC stub, and fake the request.
     with mock.patch.object(type(client.transport.delete_dataset), "__call__") as call:
@@ -1570,7 +1745,9 @@ async def test_delete_dataset_flattened_async():
         )
         # Call the method with a truthy value for each flattened field,
         # using the keyword arguments to the method.
-        response = await client.delete_dataset(name="name_value",)
+        response = await client.delete_dataset(
+            name="name_value",
+        )

         # Establish that the underlying call was made with the expected
         # request object values.
@@ -1582,13 +1759,16 @@ async def test_delete_dataset_flattened_async():

 @pytest.mark.asyncio
 async def test_delete_dataset_flattened_error_async():
-    client = DatasetServiceAsyncClient(credentials=credentials.AnonymousCredentials(),)
+    client = DatasetServiceAsyncClient(
+        credentials=credentials.AnonymousCredentials(),
+    )

     # Attempting to call a method with both a request object and flattened
     # fields is an error.
     with pytest.raises(ValueError):
         await client.delete_dataset(
-            dataset_service.DeleteDatasetRequest(), name="name_value",
+            dataset_service.DeleteDatasetRequest(),
+            name="name_value",
         )


@@ -1596,7 +1776,8 @@ def test_import_data(
     transport: str = "grpc", request_type=dataset_service.ImportDataRequest
 ):
     client = DatasetServiceClient(
-        credentials=credentials.AnonymousCredentials(), transport=transport,
+        credentials=credentials.AnonymousCredentials(),
+        transport=transport,
     )

     # Everything is optional in proto3 as far as the runtime is concerned,
@@ -1629,7 +1810,8 @@ async def test_import_data_async(
     transport: str = "grpc_asyncio", request_type=dataset_service.ImportDataRequest
 ):
     client = DatasetServiceAsyncClient(
-        credentials=credentials.AnonymousCredentials(), transport=transport,
+        credentials=credentials.AnonymousCredentials(),
+        transport=transport,
     )

     # Everything is optional in proto3 as far as the runtime is concerned,
@@ -1661,7 +1843,9 @@ async def test_import_data_async_from_dict():


 def test_import_data_field_headers():
-    client = DatasetServiceClient(credentials=credentials.AnonymousCredentials(),)
+    client = DatasetServiceClient(
+        credentials=credentials.AnonymousCredentials(),
+    )

     # Any value that is part of the HTTP/1.1 URI should be sent as
     # a field header. Set these to a non-empty value.
@@ -1681,12 +1865,17 @@ def test_import_data_field_headers():

     # Establish that the field header was sent.
_, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "name=name/value",) in kw["metadata"] + assert ( + "x-goog-request-params", + "name=name/value", + ) in kw["metadata"] @pytest.mark.asyncio async def test_import_data_field_headers_async(): - client = DatasetServiceAsyncClient(credentials=credentials.AnonymousCredentials(),) + client = DatasetServiceAsyncClient( + credentials=credentials.AnonymousCredentials(), + ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. @@ -1708,11 +1897,16 @@ async def test_import_data_field_headers_async(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "name=name/value",) in kw["metadata"] + assert ( + "x-goog-request-params", + "name=name/value", + ) in kw["metadata"] def test_import_data_flattened(): - client = DatasetServiceClient(credentials=credentials.AnonymousCredentials(),) + client = DatasetServiceClient( + credentials=credentials.AnonymousCredentials(), + ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.import_data), "__call__") as call: @@ -1741,7 +1935,9 @@ def test_import_data_flattened(): def test_import_data_flattened_error(): - client = DatasetServiceClient(credentials=credentials.AnonymousCredentials(),) + client = DatasetServiceClient( + credentials=credentials.AnonymousCredentials(), + ) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -1757,7 +1953,9 @@ def test_import_data_flattened_error(): @pytest.mark.asyncio async def test_import_data_flattened_async(): - client = DatasetServiceAsyncClient(credentials=credentials.AnonymousCredentials(),) + client = DatasetServiceAsyncClient( + credentials=credentials.AnonymousCredentials(), + ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.import_data), "__call__") as call: @@ -1790,7 +1988,9 @@ async def test_import_data_flattened_async(): @pytest.mark.asyncio async def test_import_data_flattened_error_async(): - client = DatasetServiceAsyncClient(credentials=credentials.AnonymousCredentials(),) + client = DatasetServiceAsyncClient( + credentials=credentials.AnonymousCredentials(), + ) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -1808,7 +2008,8 @@ def test_export_data( transport: str = "grpc", request_type=dataset_service.ExportDataRequest ): client = DatasetServiceClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=credentials.AnonymousCredentials(), + transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -1841,7 +2042,8 @@ async def test_export_data_async( transport: str = "grpc_asyncio", request_type=dataset_service.ExportDataRequest ): client = DatasetServiceAsyncClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=credentials.AnonymousCredentials(), + transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -1873,7 +2075,9 @@ async def test_export_data_async_from_dict(): def test_export_data_field_headers(): - client = DatasetServiceClient(credentials=credentials.AnonymousCredentials(),) + client = DatasetServiceClient( + credentials=credentials.AnonymousCredentials(), + ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. 
Set these to a non-empty value. @@ -1893,12 +2097,17 @@ def test_export_data_field_headers(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "name=name/value",) in kw["metadata"] + assert ( + "x-goog-request-params", + "name=name/value", + ) in kw["metadata"] @pytest.mark.asyncio async def test_export_data_field_headers_async(): - client = DatasetServiceAsyncClient(credentials=credentials.AnonymousCredentials(),) + client = DatasetServiceAsyncClient( + credentials=credentials.AnonymousCredentials(), + ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. @@ -1920,11 +2129,16 @@ async def test_export_data_field_headers_async(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "name=name/value",) in kw["metadata"] + assert ( + "x-goog-request-params", + "name=name/value", + ) in kw["metadata"] def test_export_data_flattened(): - client = DatasetServiceClient(credentials=credentials.AnonymousCredentials(),) + client = DatasetServiceClient( + credentials=credentials.AnonymousCredentials(), + ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.export_data), "__call__") as call: @@ -1957,7 +2171,9 @@ def test_export_data_flattened(): def test_export_data_flattened_error(): - client = DatasetServiceClient(credentials=credentials.AnonymousCredentials(),) + client = DatasetServiceClient( + credentials=credentials.AnonymousCredentials(), + ) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -1975,7 +2191,9 @@ def test_export_data_flattened_error(): @pytest.mark.asyncio async def test_export_data_flattened_async(): - client = DatasetServiceAsyncClient(credentials=credentials.AnonymousCredentials(),) + client = DatasetServiceAsyncClient( + credentials=credentials.AnonymousCredentials(), + ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.export_data), "__call__") as call: @@ -2012,7 +2230,9 @@ async def test_export_data_flattened_async(): @pytest.mark.asyncio async def test_export_data_flattened_error_async(): - client = DatasetServiceAsyncClient(credentials=credentials.AnonymousCredentials(),) + client = DatasetServiceAsyncClient( + credentials=credentials.AnonymousCredentials(), + ) # Attempting to call a method with both a request object and flattened # fields is an error. 
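The rewrapped field-header assertions above all verify the same contract: the client attaches an x-goog-request-params routing header, and gRPC metadata is simply a sequence of (key, value) tuples, so a plain membership test suffices. A reduced sketch, with kw standing in for the keyword arguments captured from call.mock_calls[0]:

    # `kw` mimics the captured keyword arguments of the mocked gRPC call.
    kw = {"metadata": [("x-goog-request-params", "name=name/value")]}
    assert (
        "x-goog-request-params",
        "name=name/value",
    ) in kw["metadata"]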
@@ -2032,7 +2252,8 @@ def test_list_data_items( transport: str = "grpc", request_type=dataset_service.ListDataItemsRequest ): client = DatasetServiceClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=credentials.AnonymousCredentials(), + transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -2070,7 +2291,8 @@ async def test_list_data_items_async( transport: str = "grpc_asyncio", request_type=dataset_service.ListDataItemsRequest ): client = DatasetServiceAsyncClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=credentials.AnonymousCredentials(), + transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -2106,7 +2328,9 @@ async def test_list_data_items_async_from_dict(): def test_list_data_items_field_headers(): - client = DatasetServiceClient(credentials=credentials.AnonymousCredentials(),) + client = DatasetServiceClient( + credentials=credentials.AnonymousCredentials(), + ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. @@ -2126,12 +2350,17 @@ def test_list_data_items_field_headers(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "parent=parent/value",) in kw["metadata"] + assert ( + "x-goog-request-params", + "parent=parent/value", + ) in kw["metadata"] @pytest.mark.asyncio async def test_list_data_items_field_headers_async(): - client = DatasetServiceAsyncClient(credentials=credentials.AnonymousCredentials(),) + client = DatasetServiceAsyncClient( + credentials=credentials.AnonymousCredentials(), + ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. @@ -2153,11 +2382,16 @@ async def test_list_data_items_field_headers_async(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "parent=parent/value",) in kw["metadata"] + assert ( + "x-goog-request-params", + "parent=parent/value", + ) in kw["metadata"] def test_list_data_items_flattened(): - client = DatasetServiceClient(credentials=credentials.AnonymousCredentials(),) + client = DatasetServiceClient( + credentials=credentials.AnonymousCredentials(), + ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.list_data_items), "__call__") as call: @@ -2166,7 +2400,9 @@ def test_list_data_items_flattened(): # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - client.list_data_items(parent="parent_value",) + client.list_data_items( + parent="parent_value", + ) # Establish that the underlying call was made with the expected # request object values. @@ -2177,19 +2413,24 @@ def test_list_data_items_flattened(): def test_list_data_items_flattened_error(): - client = DatasetServiceClient(credentials=credentials.AnonymousCredentials(),) + client = DatasetServiceClient( + credentials=credentials.AnonymousCredentials(), + ) # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): client.list_data_items( - dataset_service.ListDataItemsRequest(), parent="parent_value", + dataset_service.ListDataItemsRequest(), + parent="parent_value", ) @pytest.mark.asyncio async def test_list_data_items_flattened_async(): - client = DatasetServiceAsyncClient(credentials=credentials.AnonymousCredentials(),) + client = DatasetServiceAsyncClient( + credentials=credentials.AnonymousCredentials(), + ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.list_data_items), "__call__") as call: @@ -2201,7 +2442,9 @@ async def test_list_data_items_flattened_async(): ) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - response = await client.list_data_items(parent="parent_value",) + response = await client.list_data_items( + parent="parent_value", + ) # Establish that the underlying call was made with the expected # request object values. @@ -2213,18 +2456,23 @@ async def test_list_data_items_flattened_async(): @pytest.mark.asyncio async def test_list_data_items_flattened_error_async(): - client = DatasetServiceAsyncClient(credentials=credentials.AnonymousCredentials(),) + client = DatasetServiceAsyncClient( + credentials=credentials.AnonymousCredentials(), + ) # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): await client.list_data_items( - dataset_service.ListDataItemsRequest(), parent="parent_value", + dataset_service.ListDataItemsRequest(), + parent="parent_value", ) def test_list_data_items_pager(): - client = DatasetServiceClient(credentials=credentials.AnonymousCredentials,) + client = DatasetServiceClient( + credentials=credentials.AnonymousCredentials, + ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.list_data_items), "__call__") as call: @@ -2239,13 +2487,20 @@ def test_list_data_items_pager(): next_page_token="abc", ), dataset_service.ListDataItemsResponse( - data_items=[], next_page_token="def", + data_items=[], + next_page_token="def", ), dataset_service.ListDataItemsResponse( - data_items=[data_item.DataItem(),], next_page_token="ghi", + data_items=[ + data_item.DataItem(), + ], + next_page_token="ghi", ), dataset_service.ListDataItemsResponse( - data_items=[data_item.DataItem(), data_item.DataItem(),], + data_items=[ + data_item.DataItem(), + data_item.DataItem(), + ], ), RuntimeError, ) @@ -2264,7 +2519,9 @@ def test_list_data_items_pager(): def test_list_data_items_pages(): - client = DatasetServiceClient(credentials=credentials.AnonymousCredentials,) + client = DatasetServiceClient( + credentials=credentials.AnonymousCredentials, + ) # Mock the actual call within the gRPC stub, and fake the request. 
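In these pager tests, call.side_effect is a sequence: each invocation of the mocked RPC returns the next page, and the trailing RuntimeError makes the test fail loudly if the pager ever fetches past the last page. A reduced sketch with plain lists standing in for ListDataItemsResponse:

    from unittest import mock

    rpc = mock.Mock()
    rpc.side_effect = (
        ["item_1", "item_2"],  # page 1
        ["item_3"],            # page 2
        RuntimeError,          # any further fetch is a bug
    )
    assert rpc() == ["item_1", "item_2"]
    assert rpc() == ["item_3"]
    # A third call would raise RuntimeError.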
with mock.patch.object(type(client.transport.list_data_items), "__call__") as call: @@ -2279,13 +2536,20 @@ def test_list_data_items_pages(): next_page_token="abc", ), dataset_service.ListDataItemsResponse( - data_items=[], next_page_token="def", + data_items=[], + next_page_token="def", ), dataset_service.ListDataItemsResponse( - data_items=[data_item.DataItem(),], next_page_token="ghi", + data_items=[ + data_item.DataItem(), + ], + next_page_token="ghi", ), dataset_service.ListDataItemsResponse( - data_items=[data_item.DataItem(), data_item.DataItem(),], + data_items=[ + data_item.DataItem(), + data_item.DataItem(), + ], ), RuntimeError, ) @@ -2296,7 +2560,9 @@ def test_list_data_items_pages(): @pytest.mark.asyncio async def test_list_data_items_async_pager(): - client = DatasetServiceAsyncClient(credentials=credentials.AnonymousCredentials,) + client = DatasetServiceAsyncClient( + credentials=credentials.AnonymousCredentials, + ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -2313,17 +2579,26 @@ async def test_list_data_items_async_pager(): next_page_token="abc", ), dataset_service.ListDataItemsResponse( - data_items=[], next_page_token="def", + data_items=[], + next_page_token="def", ), dataset_service.ListDataItemsResponse( - data_items=[data_item.DataItem(),], next_page_token="ghi", + data_items=[ + data_item.DataItem(), + ], + next_page_token="ghi", ), dataset_service.ListDataItemsResponse( - data_items=[data_item.DataItem(), data_item.DataItem(),], + data_items=[ + data_item.DataItem(), + data_item.DataItem(), + ], ), RuntimeError, ) - async_pager = await client.list_data_items(request={},) + async_pager = await client.list_data_items( + request={}, + ) assert async_pager.next_page_token == "abc" responses = [] async for response in async_pager: @@ -2335,7 +2610,9 @@ async def test_list_data_items_async_pager(): @pytest.mark.asyncio async def test_list_data_items_async_pages(): - client = DatasetServiceAsyncClient(credentials=credentials.AnonymousCredentials,) + client = DatasetServiceAsyncClient( + credentials=credentials.AnonymousCredentials, + ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -2352,13 +2629,20 @@ async def test_list_data_items_async_pages(): next_page_token="abc", ), dataset_service.ListDataItemsResponse( - data_items=[], next_page_token="def", + data_items=[], + next_page_token="def", ), dataset_service.ListDataItemsResponse( - data_items=[data_item.DataItem(),], next_page_token="ghi", + data_items=[ + data_item.DataItem(), + ], + next_page_token="ghi", ), dataset_service.ListDataItemsResponse( - data_items=[data_item.DataItem(), data_item.DataItem(),], + data_items=[ + data_item.DataItem(), + data_item.DataItem(), + ], ), RuntimeError, ) @@ -2373,7 +2657,8 @@ def test_get_annotation_spec( transport: str = "grpc", request_type=dataset_service.GetAnnotationSpecRequest ): client = DatasetServiceClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=credentials.AnonymousCredentials(), + transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -2386,7 +2671,9 @@ def test_get_annotation_spec( ) as call: # Designate an appropriate return value for the call. 
call.return_value = annotation_spec.AnnotationSpec( - name="name_value", display_name="display_name_value", etag="etag_value", + name="name_value", + display_name="display_name_value", + etag="etag_value", ) response = client.get_annotation_spec(request) @@ -2418,7 +2705,8 @@ async def test_get_annotation_spec_async( request_type=dataset_service.GetAnnotationSpecRequest, ): client = DatasetServiceAsyncClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=credentials.AnonymousCredentials(), + transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -2432,7 +2720,9 @@ async def test_get_annotation_spec_async( # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( annotation_spec.AnnotationSpec( - name="name_value", display_name="display_name_value", etag="etag_value", + name="name_value", + display_name="display_name_value", + etag="etag_value", ) ) @@ -2460,7 +2750,9 @@ async def test_get_annotation_spec_async_from_dict(): def test_get_annotation_spec_field_headers(): - client = DatasetServiceClient(credentials=credentials.AnonymousCredentials(),) + client = DatasetServiceClient( + credentials=credentials.AnonymousCredentials(), + ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. @@ -2482,12 +2774,17 @@ def test_get_annotation_spec_field_headers(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "name=name/value",) in kw["metadata"] + assert ( + "x-goog-request-params", + "name=name/value", + ) in kw["metadata"] @pytest.mark.asyncio async def test_get_annotation_spec_field_headers_async(): - client = DatasetServiceAsyncClient(credentials=credentials.AnonymousCredentials(),) + client = DatasetServiceAsyncClient( + credentials=credentials.AnonymousCredentials(), + ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. @@ -2511,11 +2808,16 @@ async def test_get_annotation_spec_field_headers_async(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "name=name/value",) in kw["metadata"] + assert ( + "x-goog-request-params", + "name=name/value", + ) in kw["metadata"] def test_get_annotation_spec_flattened(): - client = DatasetServiceClient(credentials=credentials.AnonymousCredentials(),) + client = DatasetServiceClient( + credentials=credentials.AnonymousCredentials(), + ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -2526,7 +2828,9 @@ def test_get_annotation_spec_flattened(): # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - client.get_annotation_spec(name="name_value",) + client.get_annotation_spec( + name="name_value", + ) # Establish that the underlying call was made with the expected # request object values. @@ -2537,19 +2841,24 @@ def test_get_annotation_spec_flattened(): def test_get_annotation_spec_flattened_error(): - client = DatasetServiceClient(credentials=credentials.AnonymousCredentials(),) + client = DatasetServiceClient( + credentials=credentials.AnonymousCredentials(), + ) # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): client.get_annotation_spec( - dataset_service.GetAnnotationSpecRequest(), name="name_value", + dataset_service.GetAnnotationSpecRequest(), + name="name_value", ) @pytest.mark.asyncio async def test_get_annotation_spec_flattened_async(): - client = DatasetServiceAsyncClient(credentials=credentials.AnonymousCredentials(),) + client = DatasetServiceAsyncClient( + credentials=credentials.AnonymousCredentials(), + ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -2563,7 +2872,9 @@ async def test_get_annotation_spec_flattened_async(): ) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - response = await client.get_annotation_spec(name="name_value",) + response = await client.get_annotation_spec( + name="name_value", + ) # Establish that the underlying call was made with the expected # request object values. @@ -2575,13 +2886,16 @@ async def test_get_annotation_spec_flattened_async(): @pytest.mark.asyncio async def test_get_annotation_spec_flattened_error_async(): - client = DatasetServiceAsyncClient(credentials=credentials.AnonymousCredentials(),) + client = DatasetServiceAsyncClient( + credentials=credentials.AnonymousCredentials(), + ) # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): await client.get_annotation_spec( - dataset_service.GetAnnotationSpecRequest(), name="name_value", + dataset_service.GetAnnotationSpecRequest(), + name="name_value", ) @@ -2589,7 +2903,8 @@ def test_list_annotations( transport: str = "grpc", request_type=dataset_service.ListAnnotationsRequest ): client = DatasetServiceClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=credentials.AnonymousCredentials(), + transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -2627,7 +2942,8 @@ async def test_list_annotations_async( transport: str = "grpc_asyncio", request_type=dataset_service.ListAnnotationsRequest ): client = DatasetServiceAsyncClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=credentials.AnonymousCredentials(), + transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -2663,7 +2979,9 @@ async def test_list_annotations_async_from_dict(): def test_list_annotations_field_headers(): - client = DatasetServiceClient(credentials=credentials.AnonymousCredentials(),) + client = DatasetServiceClient( + credentials=credentials.AnonymousCredentials(), + ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. @@ -2683,12 +3001,17 @@ def test_list_annotations_field_headers(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "parent=parent/value",) in kw["metadata"] + assert ( + "x-goog-request-params", + "parent=parent/value", + ) in kw["metadata"] @pytest.mark.asyncio async def test_list_annotations_field_headers_async(): - client = DatasetServiceAsyncClient(credentials=credentials.AnonymousCredentials(),) + client = DatasetServiceAsyncClient( + credentials=credentials.AnonymousCredentials(), + ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. @@ -2710,11 +3033,16 @@ async def test_list_annotations_field_headers_async(): # Establish that the field header was sent. 
_, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "parent=parent/value",) in kw["metadata"] + assert ( + "x-goog-request-params", + "parent=parent/value", + ) in kw["metadata"] def test_list_annotations_flattened(): - client = DatasetServiceClient(credentials=credentials.AnonymousCredentials(),) + client = DatasetServiceClient( + credentials=credentials.AnonymousCredentials(), + ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.list_annotations), "__call__") as call: @@ -2723,7 +3051,9 @@ def test_list_annotations_flattened(): # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - client.list_annotations(parent="parent_value",) + client.list_annotations( + parent="parent_value", + ) # Establish that the underlying call was made with the expected # request object values. @@ -2734,19 +3064,24 @@ def test_list_annotations_flattened(): def test_list_annotations_flattened_error(): - client = DatasetServiceClient(credentials=credentials.AnonymousCredentials(),) + client = DatasetServiceClient( + credentials=credentials.AnonymousCredentials(), + ) # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): client.list_annotations( - dataset_service.ListAnnotationsRequest(), parent="parent_value", + dataset_service.ListAnnotationsRequest(), + parent="parent_value", ) @pytest.mark.asyncio async def test_list_annotations_flattened_async(): - client = DatasetServiceAsyncClient(credentials=credentials.AnonymousCredentials(),) + client = DatasetServiceAsyncClient( + credentials=credentials.AnonymousCredentials(), + ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.list_annotations), "__call__") as call: @@ -2758,7 +3093,9 @@ async def test_list_annotations_flattened_async(): ) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - response = await client.list_annotations(parent="parent_value",) + response = await client.list_annotations( + parent="parent_value", + ) # Establish that the underlying call was made with the expected # request object values. @@ -2770,18 +3107,23 @@ async def test_list_annotations_flattened_async(): @pytest.mark.asyncio async def test_list_annotations_flattened_error_async(): - client = DatasetServiceAsyncClient(credentials=credentials.AnonymousCredentials(),) + client = DatasetServiceAsyncClient( + credentials=credentials.AnonymousCredentials(), + ) # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): await client.list_annotations( - dataset_service.ListAnnotationsRequest(), parent="parent_value", + dataset_service.ListAnnotationsRequest(), + parent="parent_value", ) def test_list_annotations_pager(): - client = DatasetServiceClient(credentials=credentials.AnonymousCredentials,) + client = DatasetServiceClient( + credentials=credentials.AnonymousCredentials, + ) # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object(type(client.transport.list_annotations), "__call__") as call: @@ -2796,13 +3138,20 @@ def test_list_annotations_pager(): next_page_token="abc", ), dataset_service.ListAnnotationsResponse( - annotations=[], next_page_token="def", + annotations=[], + next_page_token="def", ), dataset_service.ListAnnotationsResponse( - annotations=[annotation.Annotation(),], next_page_token="ghi", + annotations=[ + annotation.Annotation(), + ], + next_page_token="ghi", ), dataset_service.ListAnnotationsResponse( - annotations=[annotation.Annotation(), annotation.Annotation(),], + annotations=[ + annotation.Annotation(), + annotation.Annotation(), + ], ), RuntimeError, ) @@ -2821,7 +3170,9 @@ def test_list_annotations_pager(): def test_list_annotations_pages(): - client = DatasetServiceClient(credentials=credentials.AnonymousCredentials,) + client = DatasetServiceClient( + credentials=credentials.AnonymousCredentials, + ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.list_annotations), "__call__") as call: @@ -2836,13 +3187,20 @@ def test_list_annotations_pages(): next_page_token="abc", ), dataset_service.ListAnnotationsResponse( - annotations=[], next_page_token="def", + annotations=[], + next_page_token="def", ), dataset_service.ListAnnotationsResponse( - annotations=[annotation.Annotation(),], next_page_token="ghi", + annotations=[ + annotation.Annotation(), + ], + next_page_token="ghi", ), dataset_service.ListAnnotationsResponse( - annotations=[annotation.Annotation(), annotation.Annotation(),], + annotations=[ + annotation.Annotation(), + annotation.Annotation(), + ], ), RuntimeError, ) @@ -2853,7 +3211,9 @@ def test_list_annotations_pages(): @pytest.mark.asyncio async def test_list_annotations_async_pager(): - client = DatasetServiceAsyncClient(credentials=credentials.AnonymousCredentials,) + client = DatasetServiceAsyncClient( + credentials=credentials.AnonymousCredentials, + ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -2870,17 +3230,26 @@ async def test_list_annotations_async_pager(): next_page_token="abc", ), dataset_service.ListAnnotationsResponse( - annotations=[], next_page_token="def", + annotations=[], + next_page_token="def", ), dataset_service.ListAnnotationsResponse( - annotations=[annotation.Annotation(),], next_page_token="ghi", + annotations=[ + annotation.Annotation(), + ], + next_page_token="ghi", ), dataset_service.ListAnnotationsResponse( - annotations=[annotation.Annotation(), annotation.Annotation(),], + annotations=[ + annotation.Annotation(), + annotation.Annotation(), + ], ), RuntimeError, ) - async_pager = await client.list_annotations(request={},) + async_pager = await client.list_annotations( + request={}, + ) assert async_pager.next_page_token == "abc" responses = [] async for response in async_pager: @@ -2892,7 +3261,9 @@ async def test_list_annotations_async_pager(): @pytest.mark.asyncio async def test_list_annotations_async_pages(): - client = DatasetServiceAsyncClient(credentials=credentials.AnonymousCredentials,) + client = DatasetServiceAsyncClient( + credentials=credentials.AnonymousCredentials, + ) # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object( @@ -2909,13 +3280,20 @@ async def test_list_annotations_async_pages(): next_page_token="abc", ), dataset_service.ListAnnotationsResponse( - annotations=[], next_page_token="def", + annotations=[], + next_page_token="def", ), dataset_service.ListAnnotationsResponse( - annotations=[annotation.Annotation(),], next_page_token="ghi", + annotations=[ + annotation.Annotation(), + ], + next_page_token="ghi", ), dataset_service.ListAnnotationsResponse( - annotations=[annotation.Annotation(), annotation.Annotation(),], + annotations=[ + annotation.Annotation(), + annotation.Annotation(), + ], ), RuntimeError, ) @@ -2933,7 +3311,8 @@ def test_credentials_transport_error(): ) with pytest.raises(ValueError): client = DatasetServiceClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=credentials.AnonymousCredentials(), + transport=transport, ) # It is an error to provide a credentials file and a transport instance. @@ -2952,7 +3331,8 @@ def test_credentials_transport_error(): ) with pytest.raises(ValueError): client = DatasetServiceClient( - client_options={"scopes": ["1", "2"]}, transport=transport, + client_options={"scopes": ["1", "2"]}, + transport=transport, ) @@ -2997,8 +3377,13 @@ def test_transport_adc(transport_class): def test_transport_grpc_default(): # A client should use the gRPC transport by default. - client = DatasetServiceClient(credentials=credentials.AnonymousCredentials(),) - assert isinstance(client.transport, transports.DatasetServiceGrpcTransport,) + client = DatasetServiceClient( + credentials=credentials.AnonymousCredentials(), + ) + assert isinstance( + client.transport, + transports.DatasetServiceGrpcTransport, + ) def test_dataset_service_base_transport_error(): @@ -3054,7 +3439,8 @@ def test_dataset_service_base_transport_with_credentials_file(): Transport.return_value = None load_creds.return_value = (credentials.AnonymousCredentials(), None) transport = transports.DatasetServiceTransport( - credentials_file="credentials.json", quota_project_id="octopus", + credentials_file="credentials.json", + quota_project_id="octopus", ) load_creds.assert_called_once_with( "credentials.json", @@ -3124,7 +3510,8 @@ def test_dataset_service_grpc_transport_channel(): # Check that channel is used if provided. transport = transports.DatasetServiceGrpcTransport( - host="squid.clam.whelk", channel=channel, + host="squid.clam.whelk", + channel=channel, ) assert transport.grpc_channel == channel assert transport._host == "squid.clam.whelk:443" @@ -3136,7 +3523,8 @@ def test_dataset_service_grpc_asyncio_transport_channel(): # Check that channel is used if provided. transport = transports.DatasetServiceGrpcAsyncIOTransport( - host="squid.clam.whelk", channel=channel, + host="squid.clam.whelk", + channel=channel, ) assert transport.grpc_channel == channel assert transport._host == "squid.clam.whelk:443" @@ -3233,12 +3621,16 @@ def test_dataset_service_transport_channel_mtls_with_adc(transport_class): def test_dataset_service_grpc_lro_client(): client = DatasetServiceClient( - credentials=credentials.AnonymousCredentials(), transport="grpc", + credentials=credentials.AnonymousCredentials(), + transport="grpc", ) transport = client.transport # Ensure that we have a api-core operations client. - assert isinstance(transport.operations_client, operations_v1.OperationsClient,) + assert isinstance( + transport.operations_client, + operations_v1.OperationsClient, + ) # Ensure that subsequent calls to the property send the exact same object. 
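The LRO tests here end by asserting, just below, that transport.operations_client is transport.operations_client, i.e. the transport memoizes its operations client and hands back the same object on every access. A simplified sketch of that cached-property shape (an assumption about the pattern under test, not the library's actual code):

    class FakeTransport:
        _operations_client = None

        @property
        def operations_client(self):
            # Built lazily on first access, then reused.
            if self._operations_client is None:
                self._operations_client = object()  # stand-in for OperationsClient
            return self._operations_client

    t = FakeTransport()
    assert t.operations_client is t.operations_client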
assert transport.operations_client is transport.operations_client @@ -3246,12 +3638,16 @@ def test_dataset_service_grpc_lro_client(): def test_dataset_service_grpc_lro_async_client(): client = DatasetServiceAsyncClient( - credentials=credentials.AnonymousCredentials(), transport="grpc_asyncio", + credentials=credentials.AnonymousCredentials(), + transport="grpc_asyncio", ) transport = client.transport # Ensure that we have a api-core operations client. - assert isinstance(transport.operations_client, operations_v1.OperationsAsyncClient,) + assert isinstance( + transport.operations_client, + operations_v1.OperationsAsyncClient, + ) # Ensure that subsequent calls to the property send the exact same object. assert transport.operations_client is transport.operations_client @@ -3331,7 +3727,10 @@ def test_data_item_path(): data_item = "nautilus" expected = "projects/{project}/locations/{location}/datasets/{dataset}/dataItems/{data_item}".format( - project=project, location=location, dataset=dataset, data_item=data_item, + project=project, + location=location, + dataset=dataset, + data_item=data_item, ) actual = DatasetServiceClient.data_item_path(project, location, dataset, data_item) assert expected == actual @@ -3357,7 +3756,9 @@ def test_dataset_path(): dataset = "oyster" expected = "projects/{project}/locations/{location}/datasets/{dataset}".format( - project=project, location=location, dataset=dataset, + project=project, + location=location, + dataset=dataset, ) actual = DatasetServiceClient.dataset_path(project, location, dataset) assert expected == actual @@ -3400,7 +3801,9 @@ def test_parse_common_billing_account_path(): def test_common_folder_path(): folder = "scallop" - expected = "folders/{folder}".format(folder=folder,) + expected = "folders/{folder}".format( + folder=folder, + ) actual = DatasetServiceClient.common_folder_path(folder) assert expected == actual @@ -3419,7 +3822,9 @@ def test_parse_common_folder_path(): def test_common_organization_path(): organization = "squid" - expected = "organizations/{organization}".format(organization=organization,) + expected = "organizations/{organization}".format( + organization=organization, + ) actual = DatasetServiceClient.common_organization_path(organization) assert expected == actual @@ -3438,7 +3843,9 @@ def test_parse_common_organization_path(): def test_common_project_path(): project = "whelk" - expected = "projects/{project}".format(project=project,) + expected = "projects/{project}".format( + project=project, + ) actual = DatasetServiceClient.common_project_path(project) assert expected == actual @@ -3459,7 +3866,8 @@ def test_common_location_path(): location = "nudibranch" expected = "projects/{project}/locations/{location}".format( - project=project, location=location, + project=project, + location=location, ) actual = DatasetServiceClient.common_location_path(project, location) assert expected == actual @@ -3484,7 +3892,8 @@ def test_client_withDEFAULT_CLIENT_INFO(): transports.DatasetServiceTransport, "_prep_wrapped_messages" ) as prep: client = DatasetServiceClient( - credentials=credentials.AnonymousCredentials(), client_info=client_info, + credentials=credentials.AnonymousCredentials(), + client_info=client_info, ) prep.assert_called_once_with(client_info) @@ -3493,6 +3902,7 @@ def test_client_withDEFAULT_CLIENT_INFO(): ) as prep: transport_class = DatasetServiceClient.get_transport_class() transport = transport_class( - credentials=credentials.AnonymousCredentials(), client_info=client_info, + 
credentials=credentials.AnonymousCredentials(), + client_info=client_info, ) prep.assert_called_once_with(client_info) diff --git a/tests/unit/gapic/aiplatform_v1beta1/test_endpoint_service.py b/tests/unit/gapic/aiplatform_v1beta1/test_endpoint_service.py index 93c35a7a2a..8994b2c8be 100644 --- a/tests/unit/gapic/aiplatform_v1beta1/test_endpoint_service.py +++ b/tests/unit/gapic/aiplatform_v1beta1/test_endpoint_service.py @@ -401,7 +401,9 @@ def test_endpoint_service_client_client_options_scopes( client_class, transport_class, transport_name ): # Check the case scopes are provided. - options = client_options.ClientOptions(scopes=["1", "2"],) + options = client_options.ClientOptions( + scopes=["1", "2"], + ) with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None client = client_class(client_options=options) @@ -469,7 +471,8 @@ def test_create_endpoint( transport: str = "grpc", request_type=endpoint_service.CreateEndpointRequest ): client = EndpointServiceClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=credentials.AnonymousCredentials(), + transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -502,7 +505,8 @@ async def test_create_endpoint_async( transport: str = "grpc_asyncio", request_type=endpoint_service.CreateEndpointRequest ): client = EndpointServiceAsyncClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=credentials.AnonymousCredentials(), + transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -534,7 +538,9 @@ async def test_create_endpoint_async_from_dict(): def test_create_endpoint_field_headers(): - client = EndpointServiceClient(credentials=credentials.AnonymousCredentials(),) + client = EndpointServiceClient( + credentials=credentials.AnonymousCredentials(), + ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. @@ -554,12 +560,17 @@ def test_create_endpoint_field_headers(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "parent=parent/value",) in kw["metadata"] + assert ( + "x-goog-request-params", + "parent=parent/value", + ) in kw["metadata"] @pytest.mark.asyncio async def test_create_endpoint_field_headers_async(): - client = EndpointServiceAsyncClient(credentials=credentials.AnonymousCredentials(),) + client = EndpointServiceAsyncClient( + credentials=credentials.AnonymousCredentials(), + ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. @@ -581,11 +592,16 @@ async def test_create_endpoint_field_headers_async(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "parent=parent/value",) in kw["metadata"] + assert ( + "x-goog-request-params", + "parent=parent/value", + ) in kw["metadata"] def test_create_endpoint_flattened(): - client = EndpointServiceClient(credentials=credentials.AnonymousCredentials(),) + client = EndpointServiceClient( + credentials=credentials.AnonymousCredentials(), + ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.create_endpoint), "__call__") as call: @@ -595,7 +611,8 @@ def test_create_endpoint_flattened(): # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. 
client.create_endpoint( - parent="parent_value", endpoint=gca_endpoint.Endpoint(name="name_value"), + parent="parent_value", + endpoint=gca_endpoint.Endpoint(name="name_value"), ) # Establish that the underlying call was made with the expected @@ -609,7 +626,9 @@ def test_create_endpoint_flattened(): def test_create_endpoint_flattened_error(): - client = EndpointServiceClient(credentials=credentials.AnonymousCredentials(),) + client = EndpointServiceClient( + credentials=credentials.AnonymousCredentials(), + ) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -623,7 +642,9 @@ def test_create_endpoint_flattened_error(): @pytest.mark.asyncio async def test_create_endpoint_flattened_async(): - client = EndpointServiceAsyncClient(credentials=credentials.AnonymousCredentials(),) + client = EndpointServiceAsyncClient( + credentials=credentials.AnonymousCredentials(), + ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.create_endpoint), "__call__") as call: @@ -636,7 +657,8 @@ async def test_create_endpoint_flattened_async(): # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. response = await client.create_endpoint( - parent="parent_value", endpoint=gca_endpoint.Endpoint(name="name_value"), + parent="parent_value", + endpoint=gca_endpoint.Endpoint(name="name_value"), ) # Establish that the underlying call was made with the expected @@ -651,7 +673,9 @@ async def test_create_endpoint_flattened_async(): @pytest.mark.asyncio async def test_create_endpoint_flattened_error_async(): - client = EndpointServiceAsyncClient(credentials=credentials.AnonymousCredentials(),) + client = EndpointServiceAsyncClient( + credentials=credentials.AnonymousCredentials(), + ) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -667,7 +691,8 @@ def test_get_endpoint( transport: str = "grpc", request_type=endpoint_service.GetEndpointRequest ): client = EndpointServiceClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=credentials.AnonymousCredentials(), + transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -714,7 +739,8 @@ async def test_get_endpoint_async( transport: str = "grpc_asyncio", request_type=endpoint_service.GetEndpointRequest ): client = EndpointServiceAsyncClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=credentials.AnonymousCredentials(), + transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -759,7 +785,9 @@ async def test_get_endpoint_async_from_dict(): def test_get_endpoint_field_headers(): - client = EndpointServiceClient(credentials=credentials.AnonymousCredentials(),) + client = EndpointServiceClient( + credentials=credentials.AnonymousCredentials(), + ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. @@ -779,12 +807,17 @@ def test_get_endpoint_field_headers(): # Establish that the field header was sent. 
_, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "name=name/value",) in kw["metadata"] + assert ( + "x-goog-request-params", + "name=name/value", + ) in kw["metadata"] @pytest.mark.asyncio async def test_get_endpoint_field_headers_async(): - client = EndpointServiceAsyncClient(credentials=credentials.AnonymousCredentials(),) + client = EndpointServiceAsyncClient( + credentials=credentials.AnonymousCredentials(), + ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. @@ -804,11 +837,16 @@ async def test_get_endpoint_field_headers_async(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "name=name/value",) in kw["metadata"] + assert ( + "x-goog-request-params", + "name=name/value", + ) in kw["metadata"] def test_get_endpoint_flattened(): - client = EndpointServiceClient(credentials=credentials.AnonymousCredentials(),) + client = EndpointServiceClient( + credentials=credentials.AnonymousCredentials(), + ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.get_endpoint), "__call__") as call: @@ -817,7 +855,9 @@ def test_get_endpoint_flattened(): # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - client.get_endpoint(name="name_value",) + client.get_endpoint( + name="name_value", + ) # Establish that the underlying call was made with the expected # request object values. @@ -828,19 +868,24 @@ def test_get_endpoint_flattened(): def test_get_endpoint_flattened_error(): - client = EndpointServiceClient(credentials=credentials.AnonymousCredentials(),) + client = EndpointServiceClient( + credentials=credentials.AnonymousCredentials(), + ) # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): client.get_endpoint( - endpoint_service.GetEndpointRequest(), name="name_value", + endpoint_service.GetEndpointRequest(), + name="name_value", ) @pytest.mark.asyncio async def test_get_endpoint_flattened_async(): - client = EndpointServiceAsyncClient(credentials=credentials.AnonymousCredentials(),) + client = EndpointServiceAsyncClient( + credentials=credentials.AnonymousCredentials(), + ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.get_endpoint), "__call__") as call: @@ -850,7 +895,9 @@ async def test_get_endpoint_flattened_async(): call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(endpoint.Endpoint()) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - response = await client.get_endpoint(name="name_value",) + response = await client.get_endpoint( + name="name_value", + ) # Establish that the underlying call was made with the expected # request object values. @@ -862,13 +909,16 @@ async def test_get_endpoint_flattened_async(): @pytest.mark.asyncio async def test_get_endpoint_flattened_error_async(): - client = EndpointServiceAsyncClient(credentials=credentials.AnonymousCredentials(),) + client = EndpointServiceAsyncClient( + credentials=credentials.AnonymousCredentials(), + ) # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): await client.get_endpoint( - endpoint_service.GetEndpointRequest(), name="name_value", + endpoint_service.GetEndpointRequest(), + name="name_value", ) @@ -876,7 +926,8 @@ def test_list_endpoints( transport: str = "grpc", request_type=endpoint_service.ListEndpointsRequest ): client = EndpointServiceClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=credentials.AnonymousCredentials(), + transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -914,7 +965,8 @@ async def test_list_endpoints_async( transport: str = "grpc_asyncio", request_type=endpoint_service.ListEndpointsRequest ): client = EndpointServiceAsyncClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=credentials.AnonymousCredentials(), + transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -950,7 +1002,9 @@ async def test_list_endpoints_async_from_dict(): def test_list_endpoints_field_headers(): - client = EndpointServiceClient(credentials=credentials.AnonymousCredentials(),) + client = EndpointServiceClient( + credentials=credentials.AnonymousCredentials(), + ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. @@ -970,12 +1024,17 @@ def test_list_endpoints_field_headers(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "parent=parent/value",) in kw["metadata"] + assert ( + "x-goog-request-params", + "parent=parent/value", + ) in kw["metadata"] @pytest.mark.asyncio async def test_list_endpoints_field_headers_async(): - client = EndpointServiceAsyncClient(credentials=credentials.AnonymousCredentials(),) + client = EndpointServiceAsyncClient( + credentials=credentials.AnonymousCredentials(), + ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. @@ -997,11 +1056,16 @@ async def test_list_endpoints_field_headers_async(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "parent=parent/value",) in kw["metadata"] + assert ( + "x-goog-request-params", + "parent=parent/value", + ) in kw["metadata"] def test_list_endpoints_flattened(): - client = EndpointServiceClient(credentials=credentials.AnonymousCredentials(),) + client = EndpointServiceClient( + credentials=credentials.AnonymousCredentials(), + ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.list_endpoints), "__call__") as call: @@ -1010,7 +1074,9 @@ def test_list_endpoints_flattened(): # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - client.list_endpoints(parent="parent_value",) + client.list_endpoints( + parent="parent_value", + ) # Establish that the underlying call was made with the expected # request object values. @@ -1021,19 +1087,24 @@ def test_list_endpoints_flattened(): def test_list_endpoints_flattened_error(): - client = EndpointServiceClient(credentials=credentials.AnonymousCredentials(),) + client = EndpointServiceClient( + credentials=credentials.AnonymousCredentials(), + ) # Attempting to call a method with both a request object and flattened # fields is an error. 
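Every *_flattened_error test in this patch pins down the same invariant of the generated surface: a full request object and flattened keyword arguments are mutually exclusive. A reduced sketch of that guard, shown for list_endpoints (the error message is paraphrased from the generated clients):

    def list_endpoints(request=None, *, parent=None):
        # Mixing a request object with flattened fields is ambiguous.
        if request is not None and parent is not None:
            raise ValueError(
                "If the `request` argument is set, then none of "
                "the individual field arguments should be set."
            )
        return request if request is not None else parent

    try:
        list_endpoints({"parent": "p"}, parent="parent_value")
    except ValueError:
        pass  # expected, mirroring the pytest.raises(ValueError) blocks above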
with pytest.raises(ValueError): client.list_endpoints( - endpoint_service.ListEndpointsRequest(), parent="parent_value", + endpoint_service.ListEndpointsRequest(), + parent="parent_value", ) @pytest.mark.asyncio async def test_list_endpoints_flattened_async(): - client = EndpointServiceAsyncClient(credentials=credentials.AnonymousCredentials(),) + client = EndpointServiceAsyncClient( + credentials=credentials.AnonymousCredentials(), + ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.list_endpoints), "__call__") as call: @@ -1045,7 +1116,9 @@ async def test_list_endpoints_flattened_async(): ) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - response = await client.list_endpoints(parent="parent_value",) + response = await client.list_endpoints( + parent="parent_value", + ) # Establish that the underlying call was made with the expected # request object values. @@ -1057,18 +1130,23 @@ async def test_list_endpoints_flattened_async(): @pytest.mark.asyncio async def test_list_endpoints_flattened_error_async(): - client = EndpointServiceAsyncClient(credentials=credentials.AnonymousCredentials(),) + client = EndpointServiceAsyncClient( + credentials=credentials.AnonymousCredentials(), + ) # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): await client.list_endpoints( - endpoint_service.ListEndpointsRequest(), parent="parent_value", + endpoint_service.ListEndpointsRequest(), + parent="parent_value", ) def test_list_endpoints_pager(): - client = EndpointServiceClient(credentials=credentials.AnonymousCredentials,) + client = EndpointServiceClient( + credentials=credentials.AnonymousCredentials, + ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.list_endpoints), "__call__") as call: @@ -1083,13 +1161,20 @@ def test_list_endpoints_pager(): next_page_token="abc", ), endpoint_service.ListEndpointsResponse( - endpoints=[], next_page_token="def", + endpoints=[], + next_page_token="def", ), endpoint_service.ListEndpointsResponse( - endpoints=[endpoint.Endpoint(),], next_page_token="ghi", + endpoints=[ + endpoint.Endpoint(), + ], + next_page_token="ghi", ), endpoint_service.ListEndpointsResponse( - endpoints=[endpoint.Endpoint(), endpoint.Endpoint(),], + endpoints=[ + endpoint.Endpoint(), + endpoint.Endpoint(), + ], ), RuntimeError, ) @@ -1108,7 +1193,9 @@ def test_list_endpoints_pager(): def test_list_endpoints_pages(): - client = EndpointServiceClient(credentials=credentials.AnonymousCredentials,) + client = EndpointServiceClient( + credentials=credentials.AnonymousCredentials, + ) # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object(type(client.transport.list_endpoints), "__call__") as call: @@ -1123,13 +1210,20 @@ def test_list_endpoints_pages(): next_page_token="abc", ), endpoint_service.ListEndpointsResponse( - endpoints=[], next_page_token="def", + endpoints=[], + next_page_token="def", ), endpoint_service.ListEndpointsResponse( - endpoints=[endpoint.Endpoint(),], next_page_token="ghi", + endpoints=[ + endpoint.Endpoint(), + ], + next_page_token="ghi", ), endpoint_service.ListEndpointsResponse( - endpoints=[endpoint.Endpoint(), endpoint.Endpoint(),], + endpoints=[ + endpoint.Endpoint(), + endpoint.Endpoint(), + ], ), RuntimeError, ) @@ -1140,7 +1234,9 @@ def test_list_endpoints_pages(): @pytest.mark.asyncio async def test_list_endpoints_async_pager(): - client = EndpointServiceAsyncClient(credentials=credentials.AnonymousCredentials,) + client = EndpointServiceAsyncClient( + credentials=credentials.AnonymousCredentials, + ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -1157,17 +1253,26 @@ async def test_list_endpoints_async_pager(): next_page_token="abc", ), endpoint_service.ListEndpointsResponse( - endpoints=[], next_page_token="def", + endpoints=[], + next_page_token="def", ), endpoint_service.ListEndpointsResponse( - endpoints=[endpoint.Endpoint(),], next_page_token="ghi", + endpoints=[ + endpoint.Endpoint(), + ], + next_page_token="ghi", ), endpoint_service.ListEndpointsResponse( - endpoints=[endpoint.Endpoint(), endpoint.Endpoint(),], + endpoints=[ + endpoint.Endpoint(), + endpoint.Endpoint(), + ], ), RuntimeError, ) - async_pager = await client.list_endpoints(request={},) + async_pager = await client.list_endpoints( + request={}, + ) assert async_pager.next_page_token == "abc" responses = [] async for response in async_pager: @@ -1179,7 +1284,9 @@ async def test_list_endpoints_async_pager(): @pytest.mark.asyncio async def test_list_endpoints_async_pages(): - client = EndpointServiceAsyncClient(credentials=credentials.AnonymousCredentials,) + client = EndpointServiceAsyncClient( + credentials=credentials.AnonymousCredentials, + ) # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object( @@ -1196,13 +1303,20 @@ async def test_list_endpoints_async_pages(): next_page_token="abc", ), endpoint_service.ListEndpointsResponse( - endpoints=[], next_page_token="def", + endpoints=[], + next_page_token="def", ), endpoint_service.ListEndpointsResponse( - endpoints=[endpoint.Endpoint(),], next_page_token="ghi", + endpoints=[ + endpoint.Endpoint(), + ], + next_page_token="ghi", ), endpoint_service.ListEndpointsResponse( - endpoints=[endpoint.Endpoint(), endpoint.Endpoint(),], + endpoints=[ + endpoint.Endpoint(), + endpoint.Endpoint(), + ], ), RuntimeError, ) @@ -1217,7 +1331,8 @@ def test_update_endpoint( transport: str = "grpc", request_type=endpoint_service.UpdateEndpointRequest ): client = EndpointServiceClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=credentials.AnonymousCredentials(), + transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -1264,7 +1379,8 @@ async def test_update_endpoint_async( transport: str = "grpc_asyncio", request_type=endpoint_service.UpdateEndpointRequest ): client = EndpointServiceAsyncClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=credentials.AnonymousCredentials(), + transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -1309,7 +1425,9 @@ async def test_update_endpoint_async_from_dict(): def test_update_endpoint_field_headers(): - client = EndpointServiceClient(credentials=credentials.AnonymousCredentials(),) + client = EndpointServiceClient( + credentials=credentials.AnonymousCredentials(), + ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. @@ -1329,14 +1447,17 @@ def test_update_endpoint_field_headers(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "endpoint.name=endpoint.name/value",) in kw[ - "metadata" - ] + assert ( + "x-goog-request-params", + "endpoint.name=endpoint.name/value", + ) in kw["metadata"] @pytest.mark.asyncio async def test_update_endpoint_field_headers_async(): - client = EndpointServiceAsyncClient(credentials=credentials.AnonymousCredentials(),) + client = EndpointServiceAsyncClient( + credentials=credentials.AnonymousCredentials(), + ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. @@ -1358,13 +1479,16 @@ async def test_update_endpoint_field_headers_async(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "endpoint.name=endpoint.name/value",) in kw[ - "metadata" - ] + assert ( + "x-goog-request-params", + "endpoint.name=endpoint.name/value", + ) in kw["metadata"] def test_update_endpoint_flattened(): - client = EndpointServiceClient(credentials=credentials.AnonymousCredentials(),) + client = EndpointServiceClient( + credentials=credentials.AnonymousCredentials(), + ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.update_endpoint), "__call__") as call: @@ -1389,7 +1513,9 @@ def test_update_endpoint_flattened(): def test_update_endpoint_flattened_error(): - client = EndpointServiceClient(credentials=credentials.AnonymousCredentials(),) + client = EndpointServiceClient( + credentials=credentials.AnonymousCredentials(), + ) # Attempting to call a method with both a request object and flattened # fields is an error. 
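Note the header in the update_endpoint tests above: the resource name lives on the nested endpoint field of the request, so the routing value is keyed endpoint.name=... rather than name=.... A small sketch of assembling such a header (routing_header is a hypothetical helper for illustration, not the library's API):

    def routing_header(field_path, value):
        # Illustrative only: x-goog-request-params pairs a request field
        # path with the value extracted from the request.
        return ("x-goog-request-params", "{}={}".format(field_path, value))

    assert routing_header("endpoint.name", "endpoint.name/value") == (
        "x-goog-request-params",
        "endpoint.name=endpoint.name/value",
    )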
@@ -1403,7 +1529,9 @@ def test_update_endpoint_flattened_error(): @pytest.mark.asyncio async def test_update_endpoint_flattened_async(): - client = EndpointServiceAsyncClient(credentials=credentials.AnonymousCredentials(),) + client = EndpointServiceAsyncClient( + credentials=credentials.AnonymousCredentials(), + ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.update_endpoint), "__call__") as call: @@ -1432,7 +1560,9 @@ async def test_update_endpoint_flattened_async(): @pytest.mark.asyncio async def test_update_endpoint_flattened_error_async(): - client = EndpointServiceAsyncClient(credentials=credentials.AnonymousCredentials(),) + client = EndpointServiceAsyncClient( + credentials=credentials.AnonymousCredentials(), + ) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -1448,7 +1578,8 @@ def test_delete_endpoint( transport: str = "grpc", request_type=endpoint_service.DeleteEndpointRequest ): client = EndpointServiceClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=credentials.AnonymousCredentials(), + transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -1481,7 +1612,8 @@ async def test_delete_endpoint_async( transport: str = "grpc_asyncio", request_type=endpoint_service.DeleteEndpointRequest ): client = EndpointServiceAsyncClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=credentials.AnonymousCredentials(), + transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -1513,7 +1645,9 @@ async def test_delete_endpoint_async_from_dict(): def test_delete_endpoint_field_headers(): - client = EndpointServiceClient(credentials=credentials.AnonymousCredentials(),) + client = EndpointServiceClient( + credentials=credentials.AnonymousCredentials(), + ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. @@ -1533,12 +1667,17 @@ def test_delete_endpoint_field_headers(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "name=name/value",) in kw["metadata"] + assert ( + "x-goog-request-params", + "name=name/value", + ) in kw["metadata"] @pytest.mark.asyncio async def test_delete_endpoint_field_headers_async(): - client = EndpointServiceAsyncClient(credentials=credentials.AnonymousCredentials(),) + client = EndpointServiceAsyncClient( + credentials=credentials.AnonymousCredentials(), + ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. @@ -1560,11 +1699,16 @@ async def test_delete_endpoint_field_headers_async(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "name=name/value",) in kw["metadata"] + assert ( + "x-goog-request-params", + "name=name/value", + ) in kw["metadata"] def test_delete_endpoint_flattened(): - client = EndpointServiceClient(credentials=credentials.AnonymousCredentials(),) + client = EndpointServiceClient( + credentials=credentials.AnonymousCredentials(), + ) # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object(type(client.transport.delete_endpoint), "__call__") as call: @@ -1573,7 +1717,9 @@ def test_delete_endpoint_flattened(): # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - client.delete_endpoint(name="name_value",) + client.delete_endpoint( + name="name_value", + ) # Establish that the underlying call was made with the expected # request object values. @@ -1584,19 +1730,24 @@ def test_delete_endpoint_flattened(): def test_delete_endpoint_flattened_error(): - client = EndpointServiceClient(credentials=credentials.AnonymousCredentials(),) + client = EndpointServiceClient( + credentials=credentials.AnonymousCredentials(), + ) # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): client.delete_endpoint( - endpoint_service.DeleteEndpointRequest(), name="name_value", + endpoint_service.DeleteEndpointRequest(), + name="name_value", ) @pytest.mark.asyncio async def test_delete_endpoint_flattened_async(): - client = EndpointServiceAsyncClient(credentials=credentials.AnonymousCredentials(),) + client = EndpointServiceAsyncClient( + credentials=credentials.AnonymousCredentials(), + ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.delete_endpoint), "__call__") as call: @@ -1608,7 +1759,9 @@ async def test_delete_endpoint_flattened_async(): ) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - response = await client.delete_endpoint(name="name_value",) + response = await client.delete_endpoint( + name="name_value", + ) # Establish that the underlying call was made with the expected # request object values. @@ -1620,13 +1773,16 @@ async def test_delete_endpoint_flattened_async(): @pytest.mark.asyncio async def test_delete_endpoint_flattened_error_async(): - client = EndpointServiceAsyncClient(credentials=credentials.AnonymousCredentials(),) + client = EndpointServiceAsyncClient( + credentials=credentials.AnonymousCredentials(), + ) # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): await client.delete_endpoint( - endpoint_service.DeleteEndpointRequest(), name="name_value", + endpoint_service.DeleteEndpointRequest(), + name="name_value", ) @@ -1634,7 +1790,8 @@ def test_deploy_model( transport: str = "grpc", request_type=endpoint_service.DeployModelRequest ): client = EndpointServiceClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=credentials.AnonymousCredentials(), + transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -1667,7 +1824,8 @@ async def test_deploy_model_async( transport: str = "grpc_asyncio", request_type=endpoint_service.DeployModelRequest ): client = EndpointServiceAsyncClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=credentials.AnonymousCredentials(), + transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -1699,7 +1857,9 @@ async def test_deploy_model_async_from_dict(): def test_deploy_model_field_headers(): - client = EndpointServiceClient(credentials=credentials.AnonymousCredentials(),) + client = EndpointServiceClient( + credentials=credentials.AnonymousCredentials(), + ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. 
Set these to a non-empty value. @@ -1719,12 +1879,17 @@ def test_deploy_model_field_headers(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "endpoint=endpoint/value",) in kw["metadata"] + assert ( + "x-goog-request-params", + "endpoint=endpoint/value", + ) in kw["metadata"] @pytest.mark.asyncio async def test_deploy_model_field_headers_async(): - client = EndpointServiceAsyncClient(credentials=credentials.AnonymousCredentials(),) + client = EndpointServiceAsyncClient( + credentials=credentials.AnonymousCredentials(), + ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. @@ -1746,11 +1911,16 @@ async def test_deploy_model_field_headers_async(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "endpoint=endpoint/value",) in kw["metadata"] + assert ( + "x-goog-request-params", + "endpoint=endpoint/value", + ) in kw["metadata"] def test_deploy_model_flattened(): - client = EndpointServiceClient(credentials=credentials.AnonymousCredentials(),) + client = EndpointServiceClient( + credentials=credentials.AnonymousCredentials(), + ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.deploy_model), "__call__") as call: @@ -1790,7 +1960,9 @@ def test_deploy_model_flattened(): def test_deploy_model_flattened_error(): - client = EndpointServiceClient(credentials=credentials.AnonymousCredentials(),) + client = EndpointServiceClient( + credentials=credentials.AnonymousCredentials(), + ) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -1811,7 +1983,9 @@ def test_deploy_model_flattened_error(): @pytest.mark.asyncio async def test_deploy_model_flattened_async(): - client = EndpointServiceAsyncClient(credentials=credentials.AnonymousCredentials(),) + client = EndpointServiceAsyncClient( + credentials=credentials.AnonymousCredentials(), + ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.deploy_model), "__call__") as call: @@ -1855,7 +2029,9 @@ async def test_deploy_model_flattened_async(): @pytest.mark.asyncio async def test_deploy_model_flattened_error_async(): - client = EndpointServiceAsyncClient(credentials=credentials.AnonymousCredentials(),) + client = EndpointServiceAsyncClient( + credentials=credentials.AnonymousCredentials(), + ) # Attempting to call a method with both a request object and flattened # fields is an error. 
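The flattened/flattened_error pairs in these hunks all pin down one GAPIC calling convention: each method accepts either a fully formed request object or individual flattened fields, and supplying both raises ValueError. A condensed sketch of the same checks, assuming the import paths the test module itself relies on:

    from unittest import mock

    import pytest
    from google.auth import credentials
    from google.cloud.aiplatform_v1beta1.services.endpoint_service import (
        EndpointServiceClient,
    )
    from google.cloud.aiplatform_v1beta1.types import endpoint_service
    from google.longrunning import operations_pb2

    client = EndpointServiceClient(credentials=credentials.AnonymousCredentials())

    # Flattened fields alone are fine; the client builds the request itself.
    with mock.patch.object(type(client.transport.delete_endpoint), "__call__") as call:
        call.return_value = operations_pb2.Operation(name="operations/spam")
        client.delete_endpoint(name="name_value")

    # A request object combined with a flattened field is rejected.
    with pytest.raises(ValueError):
        client.delete_endpoint(
            endpoint_service.DeleteEndpointRequest(),
            name="name_value",
        )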
@@ -1878,7 +2054,8 @@ def test_undeploy_model( transport: str = "grpc", request_type=endpoint_service.UndeployModelRequest ): client = EndpointServiceClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=credentials.AnonymousCredentials(), + transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -1911,7 +2088,8 @@ async def test_undeploy_model_async( transport: str = "grpc_asyncio", request_type=endpoint_service.UndeployModelRequest ): client = EndpointServiceAsyncClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=credentials.AnonymousCredentials(), + transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -1943,7 +2121,9 @@ async def test_undeploy_model_async_from_dict(): def test_undeploy_model_field_headers(): - client = EndpointServiceClient(credentials=credentials.AnonymousCredentials(),) + client = EndpointServiceClient( + credentials=credentials.AnonymousCredentials(), + ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. @@ -1963,12 +2143,17 @@ def test_undeploy_model_field_headers(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "endpoint=endpoint/value",) in kw["metadata"] + assert ( + "x-goog-request-params", + "endpoint=endpoint/value", + ) in kw["metadata"] @pytest.mark.asyncio async def test_undeploy_model_field_headers_async(): - client = EndpointServiceAsyncClient(credentials=credentials.AnonymousCredentials(),) + client = EndpointServiceAsyncClient( + credentials=credentials.AnonymousCredentials(), + ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. @@ -1990,11 +2175,16 @@ async def test_undeploy_model_field_headers_async(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "endpoint=endpoint/value",) in kw["metadata"] + assert ( + "x-goog-request-params", + "endpoint=endpoint/value", + ) in kw["metadata"] def test_undeploy_model_flattened(): - client = EndpointServiceClient(credentials=credentials.AnonymousCredentials(),) + client = EndpointServiceClient( + credentials=credentials.AnonymousCredentials(), + ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.undeploy_model), "__call__") as call: @@ -2022,7 +2212,9 @@ def test_undeploy_model_flattened(): def test_undeploy_model_flattened_error(): - client = EndpointServiceClient(credentials=credentials.AnonymousCredentials(),) + client = EndpointServiceClient( + credentials=credentials.AnonymousCredentials(), + ) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -2037,7 +2229,9 @@ def test_undeploy_model_flattened_error(): @pytest.mark.asyncio async def test_undeploy_model_flattened_async(): - client = EndpointServiceAsyncClient(credentials=credentials.AnonymousCredentials(),) + client = EndpointServiceAsyncClient( + credentials=credentials.AnonymousCredentials(), + ) # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object(type(client.transport.undeploy_model), "__call__") as call: @@ -2069,7 +2263,9 @@ async def test_undeploy_model_flattened_async(): @pytest.mark.asyncio async def test_undeploy_model_flattened_error_async(): - client = EndpointServiceAsyncClient(credentials=credentials.AnonymousCredentials(),) + client = EndpointServiceAsyncClient( + credentials=credentials.AnonymousCredentials(), + ) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -2089,7 +2285,8 @@ def test_credentials_transport_error(): ) with pytest.raises(ValueError): client = EndpointServiceClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=credentials.AnonymousCredentials(), + transport=transport, ) # It is an error to provide a credentials file and a transport instance. @@ -2108,7 +2305,8 @@ def test_credentials_transport_error(): ) with pytest.raises(ValueError): client = EndpointServiceClient( - client_options={"scopes": ["1", "2"]}, transport=transport, + client_options={"scopes": ["1", "2"]}, + transport=transport, ) @@ -2153,8 +2351,13 @@ def test_transport_adc(transport_class): def test_transport_grpc_default(): # A client should use the gRPC transport by default. - client = EndpointServiceClient(credentials=credentials.AnonymousCredentials(),) - assert isinstance(client.transport, transports.EndpointServiceGrpcTransport,) + client = EndpointServiceClient( + credentials=credentials.AnonymousCredentials(), + ) + assert isinstance( + client.transport, + transports.EndpointServiceGrpcTransport, + ) def test_endpoint_service_base_transport_error(): @@ -2207,7 +2410,8 @@ def test_endpoint_service_base_transport_with_credentials_file(): Transport.return_value = None load_creds.return_value = (credentials.AnonymousCredentials(), None) transport = transports.EndpointServiceTransport( - credentials_file="credentials.json", quota_project_id="octopus", + credentials_file="credentials.json", + quota_project_id="octopus", ) load_creds.assert_called_once_with( "credentials.json", @@ -2277,7 +2481,8 @@ def test_endpoint_service_grpc_transport_channel(): # Check that channel is used if provided. transport = transports.EndpointServiceGrpcTransport( - host="squid.clam.whelk", channel=channel, + host="squid.clam.whelk", + channel=channel, ) assert transport.grpc_channel == channel assert transport._host == "squid.clam.whelk:443" @@ -2289,7 +2494,8 @@ def test_endpoint_service_grpc_asyncio_transport_channel(): # Check that channel is used if provided. transport = transports.EndpointServiceGrpcAsyncIOTransport( - host="squid.clam.whelk", channel=channel, + host="squid.clam.whelk", + channel=channel, ) assert transport.grpc_channel == channel assert transport._host == "squid.clam.whelk:443" @@ -2386,12 +2592,16 @@ def test_endpoint_service_transport_channel_mtls_with_adc(transport_class): def test_endpoint_service_grpc_lro_client(): client = EndpointServiceClient( - credentials=credentials.AnonymousCredentials(), transport="grpc", + credentials=credentials.AnonymousCredentials(), + transport="grpc", ) transport = client.transport # Ensure that we have an api-core operations client. - assert isinstance(transport.operations_client, operations_v1.OperationsClient,) + assert isinstance( + transport.operations_client, + operations_v1.OperationsClient, + ) # Ensure that subsequent calls to the property send the exact same object.
assert transport.operations_client is transport.operations_client @@ -2399,12 +2609,16 @@ def test_endpoint_service_grpc_lro_client(): def test_endpoint_service_grpc_lro_async_client(): client = EndpointServiceAsyncClient( - credentials=credentials.AnonymousCredentials(), transport="grpc_asyncio", + credentials=credentials.AnonymousCredentials(), + transport="grpc_asyncio", ) transport = client.transport # Ensure that we have an api-core operations client. - assert isinstance(transport.operations_client, operations_v1.OperationsAsyncClient,) + assert isinstance( + transport.operations_client, + operations_v1.OperationsAsyncClient, + ) # Ensure that subsequent calls to the property send the exact same object. assert transport.operations_client is transport.operations_client @@ -2416,7 +2630,9 @@ def test_endpoint_path(): endpoint = "whelk" expected = "projects/{project}/locations/{location}/endpoints/{endpoint}".format( - project=project, location=location, endpoint=endpoint, + project=project, + location=location, + endpoint=endpoint, ) actual = EndpointServiceClient.endpoint_path(project, location, endpoint) assert expected == actual @@ -2441,7 +2657,9 @@ def test_model_path(): model = "winkle" expected = "projects/{project}/locations/{location}/models/{model}".format( - project=project, location=location, model=model, + project=project, + location=location, + model=model, ) actual = EndpointServiceClient.model_path(project, location, model) assert expected == actual @@ -2484,7 +2702,9 @@ def test_parse_common_billing_account_path(): def test_common_folder_path(): folder = "whelk" - expected = "folders/{folder}".format(folder=folder,) + expected = "folders/{folder}".format( + folder=folder, + ) actual = EndpointServiceClient.common_folder_path(folder) assert expected == actual @@ -2503,7 +2723,9 @@ def test_parse_common_folder_path(): def test_common_organization_path(): organization = "oyster" - expected = "organizations/{organization}".format(organization=organization,) + expected = "organizations/{organization}".format( + organization=organization, + ) actual = EndpointServiceClient.common_organization_path(organization) assert expected == actual @@ -2522,7 +2744,9 @@ def test_parse_common_organization_path(): def test_common_project_path(): project = "cuttlefish" - expected = "projects/{project}".format(project=project,) + expected = "projects/{project}".format( + project=project, + ) actual = EndpointServiceClient.common_project_path(project) assert expected == actual @@ -2543,7 +2767,8 @@ def test_common_location_path(): location = "nautilus" expected = "projects/{project}/locations/{location}".format( - project=project, location=location, + project=project, + location=location, ) actual = EndpointServiceClient.common_location_path(project, location) assert expected == actual @@ -2568,7 +2793,8 @@ def test_client_withDEFAULT_CLIENT_INFO(): transports.EndpointServiceTransport, "_prep_wrapped_messages" ) as prep: client = EndpointServiceClient( - credentials=credentials.AnonymousCredentials(), client_info=client_info, + credentials=credentials.AnonymousCredentials(), + client_info=client_info, ) prep.assert_called_once_with(client_info) @@ -2577,6 +2803,7 @@ def test_client_withDEFAULT_CLIENT_INFO(): ) as prep: transport_class = EndpointServiceClient.get_transport_class() transport = transport_class( - credentials=credentials.AnonymousCredentials(), client_info=client_info, + credentials=credentials.AnonymousCredentials(), + client_info=client_info, )
prep.assert_called_once_with(client_info) diff --git a/tests/unit/gapic/aiplatform_v1beta1/test_job_service.py b/tests/unit/gapic/aiplatform_v1beta1/test_job_service.py index f08d84bd2f..f99ac1ce5d 100644 --- a/tests/unit/gapic/aiplatform_v1beta1/test_job_service.py +++ b/tests/unit/gapic/aiplatform_v1beta1/test_job_service.py @@ -399,7 +399,9 @@ def test_job_service_client_client_options_scopes( client_class, transport_class, transport_name ): # Check the case scopes are provided. - options = client_options.ClientOptions(scopes=["1", "2"],) + options = client_options.ClientOptions( + scopes=["1", "2"], + ) with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None client = client_class(client_options=options) @@ -465,7 +467,8 @@ def test_create_custom_job( transport: str = "grpc", request_type=job_service.CreateCustomJobRequest ): client = JobServiceClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=credentials.AnonymousCredentials(), + transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -511,7 +514,8 @@ async def test_create_custom_job_async( transport: str = "grpc_asyncio", request_type=job_service.CreateCustomJobRequest ): client = JobServiceAsyncClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=credentials.AnonymousCredentials(), + transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -555,7 +559,9 @@ async def test_create_custom_job_async_from_dict(): def test_create_custom_job_field_headers(): - client = JobServiceClient(credentials=credentials.AnonymousCredentials(),) + client = JobServiceClient( + credentials=credentials.AnonymousCredentials(), + ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. @@ -577,12 +583,17 @@ def test_create_custom_job_field_headers(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "parent=parent/value",) in kw["metadata"] + assert ( + "x-goog-request-params", + "parent=parent/value", + ) in kw["metadata"] @pytest.mark.asyncio async def test_create_custom_job_field_headers_async(): - client = JobServiceAsyncClient(credentials=credentials.AnonymousCredentials(),) + client = JobServiceAsyncClient( + credentials=credentials.AnonymousCredentials(), + ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. @@ -606,11 +617,16 @@ async def test_create_custom_job_field_headers_async(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "parent=parent/value",) in kw["metadata"] + assert ( + "x-goog-request-params", + "parent=parent/value", + ) in kw["metadata"] def test_create_custom_job_flattened(): - client = JobServiceClient(credentials=credentials.AnonymousCredentials(),) + client = JobServiceClient( + credentials=credentials.AnonymousCredentials(), + ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -637,7 +653,9 @@ def test_create_custom_job_flattened(): def test_create_custom_job_flattened_error(): - client = JobServiceClient(credentials=credentials.AnonymousCredentials(),) + client = JobServiceClient( + credentials=credentials.AnonymousCredentials(), + ) # Attempting to call a method with both a request object and flattened # fields is an error. 
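The field_headers tests in this file verify GAPIC request routing: the client mirrors resource fields of the request into x-goog-request-params gRPC metadata so the backend can route the call, which is what the ("x-goog-request-params", "parent=parent/value") membership assertions check. A self-contained sketch of that pattern, with import paths assumed to match the test module:

    from unittest import mock

    from google.auth import credentials
    from google.cloud.aiplatform_v1beta1.services.job_service import JobServiceClient
    from google.cloud.aiplatform_v1beta1.types import custom_job, job_service

    client = JobServiceClient(credentials=credentials.AnonymousCredentials())
    request = job_service.GetCustomJobRequest(name="name/value")

    with mock.patch.object(type(client.transport.get_custom_job), "__call__") as call:
        call.return_value = custom_job.CustomJob()
        client.get_custom_job(request)

    # The routing header travels in the call's metadata keyword argument.
    _, _, kw = call.mock_calls[0]
    assert ("x-goog-request-params", "name=name/value") in kw["metadata"]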
@@ -651,7 +669,9 @@ def test_create_custom_job_flattened_error(): @pytest.mark.asyncio async def test_create_custom_job_flattened_async(): - client = JobServiceAsyncClient(credentials=credentials.AnonymousCredentials(),) + client = JobServiceAsyncClient( + credentials=credentials.AnonymousCredentials(), + ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -682,7 +702,9 @@ async def test_create_custom_job_flattened_async(): @pytest.mark.asyncio async def test_create_custom_job_flattened_error_async(): - client = JobServiceAsyncClient(credentials=credentials.AnonymousCredentials(),) + client = JobServiceAsyncClient( + credentials=credentials.AnonymousCredentials(), + ) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -698,7 +720,8 @@ def test_get_custom_job( transport: str = "grpc", request_type=job_service.GetCustomJobRequest ): client = JobServiceClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=credentials.AnonymousCredentials(), + transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -742,7 +765,8 @@ async def test_get_custom_job_async( transport: str = "grpc_asyncio", request_type=job_service.GetCustomJobRequest ): client = JobServiceAsyncClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=credentials.AnonymousCredentials(), + transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -784,7 +808,9 @@ async def test_get_custom_job_async_from_dict(): def test_get_custom_job_field_headers(): - client = JobServiceClient(credentials=credentials.AnonymousCredentials(),) + client = JobServiceClient( + credentials=credentials.AnonymousCredentials(), + ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. @@ -804,12 +830,17 @@ def test_get_custom_job_field_headers(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "name=name/value",) in kw["metadata"] + assert ( + "x-goog-request-params", + "name=name/value", + ) in kw["metadata"] @pytest.mark.asyncio async def test_get_custom_job_field_headers_async(): - client = JobServiceAsyncClient(credentials=credentials.AnonymousCredentials(),) + client = JobServiceAsyncClient( + credentials=credentials.AnonymousCredentials(), + ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. @@ -831,11 +862,16 @@ async def test_get_custom_job_field_headers_async(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "name=name/value",) in kw["metadata"] + assert ( + "x-goog-request-params", + "name=name/value", + ) in kw["metadata"] def test_get_custom_job_flattened(): - client = JobServiceClient(credentials=credentials.AnonymousCredentials(),) + client = JobServiceClient( + credentials=credentials.AnonymousCredentials(), + ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.get_custom_job), "__call__") as call: @@ -844,7 +880,9 @@ def test_get_custom_job_flattened(): # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. 
- client.get_custom_job(name="name_value",) + client.get_custom_job( + name="name_value", + ) # Establish that the underlying call was made with the expected # request object values. @@ -855,19 +893,24 @@ def test_get_custom_job_flattened(): def test_get_custom_job_flattened_error(): - client = JobServiceClient(credentials=credentials.AnonymousCredentials(),) + client = JobServiceClient( + credentials=credentials.AnonymousCredentials(), + ) # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): client.get_custom_job( - job_service.GetCustomJobRequest(), name="name_value", + job_service.GetCustomJobRequest(), + name="name_value", ) @pytest.mark.asyncio async def test_get_custom_job_flattened_async(): - client = JobServiceAsyncClient(credentials=credentials.AnonymousCredentials(),) + client = JobServiceAsyncClient( + credentials=credentials.AnonymousCredentials(), + ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.get_custom_job), "__call__") as call: @@ -879,7 +922,9 @@ async def test_get_custom_job_flattened_async(): ) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - response = await client.get_custom_job(name="name_value",) + response = await client.get_custom_job( + name="name_value", + ) # Establish that the underlying call was made with the expected # request object values. @@ -891,13 +936,16 @@ async def test_get_custom_job_flattened_async(): @pytest.mark.asyncio async def test_get_custom_job_flattened_error_async(): - client = JobServiceAsyncClient(credentials=credentials.AnonymousCredentials(),) + client = JobServiceAsyncClient( + credentials=credentials.AnonymousCredentials(), + ) # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): await client.get_custom_job( - job_service.GetCustomJobRequest(), name="name_value", + job_service.GetCustomJobRequest(), + name="name_value", ) @@ -905,7 +953,8 @@ def test_list_custom_jobs( transport: str = "grpc", request_type=job_service.ListCustomJobsRequest ): client = JobServiceClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=credentials.AnonymousCredentials(), + transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -943,7 +992,8 @@ async def test_list_custom_jobs_async( transport: str = "grpc_asyncio", request_type=job_service.ListCustomJobsRequest ): client = JobServiceAsyncClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=credentials.AnonymousCredentials(), + transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -954,7 +1004,9 @@ async def test_list_custom_jobs_async( with mock.patch.object(type(client.transport.list_custom_jobs), "__call__") as call: # Designate an appropriate return value for the call. 
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - job_service.ListCustomJobsResponse(next_page_token="next_page_token_value",) + job_service.ListCustomJobsResponse( + next_page_token="next_page_token_value", + ) ) response = await client.list_custom_jobs(request) @@ -977,7 +1029,9 @@ async def test_list_custom_jobs_async_from_dict(): def test_list_custom_jobs_field_headers(): - client = JobServiceClient(credentials=credentials.AnonymousCredentials(),) + client = JobServiceClient( + credentials=credentials.AnonymousCredentials(), + ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. @@ -997,12 +1051,17 @@ def test_list_custom_jobs_field_headers(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "parent=parent/value",) in kw["metadata"] + assert ( + "x-goog-request-params", + "parent=parent/value", + ) in kw["metadata"] @pytest.mark.asyncio async def test_list_custom_jobs_field_headers_async(): - client = JobServiceAsyncClient(credentials=credentials.AnonymousCredentials(),) + client = JobServiceAsyncClient( + credentials=credentials.AnonymousCredentials(), + ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. @@ -1024,11 +1083,16 @@ async def test_list_custom_jobs_field_headers_async(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "parent=parent/value",) in kw["metadata"] + assert ( + "x-goog-request-params", + "parent=parent/value", + ) in kw["metadata"] def test_list_custom_jobs_flattened(): - client = JobServiceClient(credentials=credentials.AnonymousCredentials(),) + client = JobServiceClient( + credentials=credentials.AnonymousCredentials(), + ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.list_custom_jobs), "__call__") as call: @@ -1037,7 +1101,9 @@ def test_list_custom_jobs_flattened(): # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - client.list_custom_jobs(parent="parent_value",) + client.list_custom_jobs( + parent="parent_value", + ) # Establish that the underlying call was made with the expected # request object values. @@ -1048,19 +1114,24 @@ def test_list_custom_jobs_flattened(): def test_list_custom_jobs_flattened_error(): - client = JobServiceClient(credentials=credentials.AnonymousCredentials(),) + client = JobServiceClient( + credentials=credentials.AnonymousCredentials(), + ) # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): client.list_custom_jobs( - job_service.ListCustomJobsRequest(), parent="parent_value", + job_service.ListCustomJobsRequest(), + parent="parent_value", ) @pytest.mark.asyncio async def test_list_custom_jobs_flattened_async(): - client = JobServiceAsyncClient(credentials=credentials.AnonymousCredentials(),) + client = JobServiceAsyncClient( + credentials=credentials.AnonymousCredentials(), + ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.list_custom_jobs), "__call__") as call: @@ -1072,7 +1143,9 @@ async def test_list_custom_jobs_flattened_async(): ) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. 
- response = await client.list_custom_jobs(parent="parent_value",) + response = await client.list_custom_jobs( + parent="parent_value", + ) # Establish that the underlying call was made with the expected # request object values. @@ -1084,18 +1157,23 @@ async def test_list_custom_jobs_flattened_async(): @pytest.mark.asyncio async def test_list_custom_jobs_flattened_error_async(): - client = JobServiceAsyncClient(credentials=credentials.AnonymousCredentials(),) + client = JobServiceAsyncClient( + credentials=credentials.AnonymousCredentials(), + ) # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): await client.list_custom_jobs( - job_service.ListCustomJobsRequest(), parent="parent_value", + job_service.ListCustomJobsRequest(), + parent="parent_value", ) def test_list_custom_jobs_pager(): - client = JobServiceClient(credentials=credentials.AnonymousCredentials,) + client = JobServiceClient( + credentials=credentials.AnonymousCredentials, + ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.list_custom_jobs), "__call__") as call: @@ -1109,12 +1187,21 @@ def test_list_custom_jobs_pager(): ], next_page_token="abc", ), - job_service.ListCustomJobsResponse(custom_jobs=[], next_page_token="def",), job_service.ListCustomJobsResponse( - custom_jobs=[custom_job.CustomJob(),], next_page_token="ghi", + custom_jobs=[], + next_page_token="def", ), job_service.ListCustomJobsResponse( - custom_jobs=[custom_job.CustomJob(), custom_job.CustomJob(),], + custom_jobs=[ + custom_job.CustomJob(), + ], + next_page_token="ghi", + ), + job_service.ListCustomJobsResponse( + custom_jobs=[ + custom_job.CustomJob(), + custom_job.CustomJob(), + ], ), RuntimeError, ) @@ -1133,7 +1220,9 @@ def test_list_custom_jobs_pager(): def test_list_custom_jobs_pages(): - client = JobServiceClient(credentials=credentials.AnonymousCredentials,) + client = JobServiceClient( + credentials=credentials.AnonymousCredentials, + ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.list_custom_jobs), "__call__") as call: @@ -1147,12 +1236,21 @@ def test_list_custom_jobs_pages(): ], next_page_token="abc", ), - job_service.ListCustomJobsResponse(custom_jobs=[], next_page_token="def",), job_service.ListCustomJobsResponse( - custom_jobs=[custom_job.CustomJob(),], next_page_token="ghi", + custom_jobs=[], + next_page_token="def", ), job_service.ListCustomJobsResponse( - custom_jobs=[custom_job.CustomJob(), custom_job.CustomJob(),], + custom_jobs=[ + custom_job.CustomJob(), + ], + next_page_token="ghi", + ), + job_service.ListCustomJobsResponse( + custom_jobs=[ + custom_job.CustomJob(), + custom_job.CustomJob(), + ], ), RuntimeError, ) @@ -1163,7 +1261,9 @@ def test_list_custom_jobs_pages(): @pytest.mark.asyncio async def test_list_custom_jobs_async_pager(): - client = JobServiceAsyncClient(credentials=credentials.AnonymousCredentials,) + client = JobServiceAsyncClient( + credentials=credentials.AnonymousCredentials, + ) # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object( @@ -1179,16 +1279,27 @@ async def test_list_custom_jobs_async_pager(): ], next_page_token="abc", ), - job_service.ListCustomJobsResponse(custom_jobs=[], next_page_token="def",), job_service.ListCustomJobsResponse( - custom_jobs=[custom_job.CustomJob(),], next_page_token="ghi", + custom_jobs=[], + next_page_token="def", + ), + job_service.ListCustomJobsResponse( + custom_jobs=[ + custom_job.CustomJob(), + ], + next_page_token="ghi", ), job_service.ListCustomJobsResponse( - custom_jobs=[custom_job.CustomJob(), custom_job.CustomJob(),], + custom_jobs=[ + custom_job.CustomJob(), + custom_job.CustomJob(), + ], ), RuntimeError, ) - async_pager = await client.list_custom_jobs(request={},) + async_pager = await client.list_custom_jobs( + request={}, + ) assert async_pager.next_page_token == "abc" responses = [] async for response in async_pager: @@ -1200,7 +1311,9 @@ async def test_list_custom_jobs_async_pager(): @pytest.mark.asyncio async def test_list_custom_jobs_async_pages(): - client = JobServiceAsyncClient(credentials=credentials.AnonymousCredentials,) + client = JobServiceAsyncClient( + credentials=credentials.AnonymousCredentials, + ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -1216,12 +1329,21 @@ async def test_list_custom_jobs_async_pages(): ], next_page_token="abc", ), - job_service.ListCustomJobsResponse(custom_jobs=[], next_page_token="def",), job_service.ListCustomJobsResponse( - custom_jobs=[custom_job.CustomJob(),], next_page_token="ghi", + custom_jobs=[], + next_page_token="def", ), job_service.ListCustomJobsResponse( - custom_jobs=[custom_job.CustomJob(), custom_job.CustomJob(),], + custom_jobs=[ + custom_job.CustomJob(), + ], + next_page_token="ghi", + ), + job_service.ListCustomJobsResponse( + custom_jobs=[ + custom_job.CustomJob(), + custom_job.CustomJob(), + ], ), RuntimeError, ) @@ -1236,7 +1358,8 @@ def test_delete_custom_job( transport: str = "grpc", request_type=job_service.DeleteCustomJobRequest ): client = JobServiceClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=credentials.AnonymousCredentials(), + transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -1271,7 +1394,8 @@ async def test_delete_custom_job_async( transport: str = "grpc_asyncio", request_type=job_service.DeleteCustomJobRequest ): client = JobServiceAsyncClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=credentials.AnonymousCredentials(), + transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -1305,7 +1429,9 @@ async def test_delete_custom_job_async_from_dict(): def test_delete_custom_job_field_headers(): - client = JobServiceClient(credentials=credentials.AnonymousCredentials(),) + client = JobServiceClient( + credentials=credentials.AnonymousCredentials(), + ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. @@ -1327,12 +1453,17 @@ def test_delete_custom_job_field_headers(): # Establish that the field header was sent. 
_, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "name=name/value",) in kw["metadata"] + assert ( + "x-goog-request-params", + "name=name/value", + ) in kw["metadata"] @pytest.mark.asyncio async def test_delete_custom_job_field_headers_async(): - client = JobServiceAsyncClient(credentials=credentials.AnonymousCredentials(),) + client = JobServiceAsyncClient( + credentials=credentials.AnonymousCredentials(), + ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. @@ -1356,11 +1487,16 @@ async def test_delete_custom_job_field_headers_async(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "name=name/value",) in kw["metadata"] + assert ( + "x-goog-request-params", + "name=name/value", + ) in kw["metadata"] def test_delete_custom_job_flattened(): - client = JobServiceClient(credentials=credentials.AnonymousCredentials(),) + client = JobServiceClient( + credentials=credentials.AnonymousCredentials(), + ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -1371,7 +1507,9 @@ def test_delete_custom_job_flattened(): # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - client.delete_custom_job(name="name_value",) + client.delete_custom_job( + name="name_value", + ) # Establish that the underlying call was made with the expected # request object values. @@ -1382,19 +1520,24 @@ def test_delete_custom_job_flattened(): def test_delete_custom_job_flattened_error(): - client = JobServiceClient(credentials=credentials.AnonymousCredentials(),) + client = JobServiceClient( + credentials=credentials.AnonymousCredentials(), + ) # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): client.delete_custom_job( - job_service.DeleteCustomJobRequest(), name="name_value", + job_service.DeleteCustomJobRequest(), + name="name_value", ) @pytest.mark.asyncio async def test_delete_custom_job_flattened_async(): - client = JobServiceAsyncClient(credentials=credentials.AnonymousCredentials(),) + client = JobServiceAsyncClient( + credentials=credentials.AnonymousCredentials(), + ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -1408,7 +1551,9 @@ async def test_delete_custom_job_flattened_async(): ) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - response = await client.delete_custom_job(name="name_value",) + response = await client.delete_custom_job( + name="name_value", + ) # Establish that the underlying call was made with the expected # request object values. @@ -1420,13 +1565,16 @@ async def test_delete_custom_job_flattened_async(): @pytest.mark.asyncio async def test_delete_custom_job_flattened_error_async(): - client = JobServiceAsyncClient(credentials=credentials.AnonymousCredentials(),) + client = JobServiceAsyncClient( + credentials=credentials.AnonymousCredentials(), + ) # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): await client.delete_custom_job( - job_service.DeleteCustomJobRequest(), name="name_value", + job_service.DeleteCustomJobRequest(), + name="name_value", ) @@ -1434,7 +1582,8 @@ def test_cancel_custom_job( transport: str = "grpc", request_type=job_service.CancelCustomJobRequest ): client = JobServiceClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=credentials.AnonymousCredentials(), + transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -1469,7 +1618,8 @@ async def test_cancel_custom_job_async( transport: str = "grpc_asyncio", request_type=job_service.CancelCustomJobRequest ): client = JobServiceAsyncClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=credentials.AnonymousCredentials(), + transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -1501,7 +1651,9 @@ async def test_cancel_custom_job_async_from_dict(): def test_cancel_custom_job_field_headers(): - client = JobServiceClient(credentials=credentials.AnonymousCredentials(),) + client = JobServiceClient( + credentials=credentials.AnonymousCredentials(), + ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. @@ -1523,12 +1675,17 @@ def test_cancel_custom_job_field_headers(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "name=name/value",) in kw["metadata"] + assert ( + "x-goog-request-params", + "name=name/value", + ) in kw["metadata"] @pytest.mark.asyncio async def test_cancel_custom_job_field_headers_async(): - client = JobServiceAsyncClient(credentials=credentials.AnonymousCredentials(),) + client = JobServiceAsyncClient( + credentials=credentials.AnonymousCredentials(), + ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. @@ -1550,11 +1707,16 @@ async def test_cancel_custom_job_field_headers_async(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "name=name/value",) in kw["metadata"] + assert ( + "x-goog-request-params", + "name=name/value", + ) in kw["metadata"] def test_cancel_custom_job_flattened(): - client = JobServiceClient(credentials=credentials.AnonymousCredentials(),) + client = JobServiceClient( + credentials=credentials.AnonymousCredentials(), + ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -1565,7 +1727,9 @@ def test_cancel_custom_job_flattened(): # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - client.cancel_custom_job(name="name_value",) + client.cancel_custom_job( + name="name_value", + ) # Establish that the underlying call was made with the expected # request object values. @@ -1576,19 +1740,24 @@ def test_cancel_custom_job_flattened(): def test_cancel_custom_job_flattened_error(): - client = JobServiceClient(credentials=credentials.AnonymousCredentials(),) + client = JobServiceClient( + credentials=credentials.AnonymousCredentials(), + ) # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): client.cancel_custom_job( - job_service.CancelCustomJobRequest(), name="name_value", + job_service.CancelCustomJobRequest(), + name="name_value", ) @pytest.mark.asyncio async def test_cancel_custom_job_flattened_async(): - client = JobServiceAsyncClient(credentials=credentials.AnonymousCredentials(),) + client = JobServiceAsyncClient( + credentials=credentials.AnonymousCredentials(), + ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -1600,7 +1769,9 @@ async def test_cancel_custom_job_flattened_async(): call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - response = await client.cancel_custom_job(name="name_value",) + response = await client.cancel_custom_job( + name="name_value", + ) # Establish that the underlying call was made with the expected # request object values. @@ -1612,13 +1783,16 @@ async def test_cancel_custom_job_flattened_async(): @pytest.mark.asyncio async def test_cancel_custom_job_flattened_error_async(): - client = JobServiceAsyncClient(credentials=credentials.AnonymousCredentials(),) + client = JobServiceAsyncClient( + credentials=credentials.AnonymousCredentials(), + ) # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): await client.cancel_custom_job( - job_service.CancelCustomJobRequest(), name="name_value", + job_service.CancelCustomJobRequest(), + name="name_value", ) @@ -1626,7 +1800,8 @@ def test_create_data_labeling_job( transport: str = "grpc", request_type=job_service.CreateDataLabelingJobRequest ): client = JobServiceClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=credentials.AnonymousCredentials(), + transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -1691,7 +1866,8 @@ async def test_create_data_labeling_job_async( request_type=job_service.CreateDataLabelingJobRequest, ): client = JobServiceAsyncClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=credentials.AnonymousCredentials(), + transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -1753,7 +1929,9 @@ async def test_create_data_labeling_job_async_from_dict(): def test_create_data_labeling_job_field_headers(): - client = JobServiceClient(credentials=credentials.AnonymousCredentials(),) + client = JobServiceClient( + credentials=credentials.AnonymousCredentials(), + ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. @@ -1775,12 +1953,17 @@ def test_create_data_labeling_job_field_headers(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "parent=parent/value",) in kw["metadata"] + assert ( + "x-goog-request-params", + "parent=parent/value", + ) in kw["metadata"] @pytest.mark.asyncio async def test_create_data_labeling_job_field_headers_async(): - client = JobServiceAsyncClient(credentials=credentials.AnonymousCredentials(),) + client = JobServiceAsyncClient( + credentials=credentials.AnonymousCredentials(), + ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. 
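The async variants above mock the transport with grpc_helpers_async.FakeUnaryUnaryCall, which wraps a plain value so the patched stub is awaitable like a real unary-unary gRPC call; CancelCustomJob returns Empty, hence the None payload. A runnable sketch of that pattern (import paths assumed to mirror the test module; Python 3.7+ for asyncio.run):

    import asyncio
    from unittest import mock

    from google.api_core import grpc_helpers_async
    from google.auth import credentials
    from google.cloud.aiplatform_v1beta1.services.job_service import (
        JobServiceAsyncClient,
    )

    async def main():
        client = JobServiceAsyncClient(credentials=credentials.AnonymousCredentials())
        with mock.patch.object(
            type(client.transport.cancel_custom_job), "__call__"
        ) as call:
            # FakeUnaryUnaryCall(None) stands in for the Empty response of
            # CancelCustomJob and makes the mocked call awaitable.
            call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None)
            await client.cancel_custom_job(name="name_value")

    asyncio.run(main())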
@@ -1804,11 +1987,16 @@ async def test_create_data_labeling_job_field_headers_async(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "parent=parent/value",) in kw["metadata"] + assert ( + "x-goog-request-params", + "parent=parent/value", + ) in kw["metadata"] def test_create_data_labeling_job_flattened(): - client = JobServiceClient(credentials=credentials.AnonymousCredentials(),) + client = JobServiceClient( + credentials=credentials.AnonymousCredentials(), + ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -1837,7 +2025,9 @@ def test_create_data_labeling_job_flattened(): def test_create_data_labeling_job_flattened_error(): - client = JobServiceClient(credentials=credentials.AnonymousCredentials(),) + client = JobServiceClient( + credentials=credentials.AnonymousCredentials(), + ) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -1851,7 +2041,9 @@ def test_create_data_labeling_job_flattened_error(): @pytest.mark.asyncio async def test_create_data_labeling_job_flattened_async(): - client = JobServiceAsyncClient(credentials=credentials.AnonymousCredentials(),) + client = JobServiceAsyncClient( + credentials=credentials.AnonymousCredentials(), + ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -1884,7 +2076,9 @@ async def test_create_data_labeling_job_flattened_async(): @pytest.mark.asyncio async def test_create_data_labeling_job_flattened_error_async(): - client = JobServiceAsyncClient(credentials=credentials.AnonymousCredentials(),) + client = JobServiceAsyncClient( + credentials=credentials.AnonymousCredentials(), + ) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -1900,7 +2094,8 @@ def test_get_data_labeling_job( transport: str = "grpc", request_type=job_service.GetDataLabelingJobRequest ): client = JobServiceClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=credentials.AnonymousCredentials(), + transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -1964,7 +2159,8 @@ async def test_get_data_labeling_job_async( transport: str = "grpc_asyncio", request_type=job_service.GetDataLabelingJobRequest ): client = JobServiceAsyncClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=credentials.AnonymousCredentials(), + transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -2026,7 +2222,9 @@ async def test_get_data_labeling_job_async_from_dict(): def test_get_data_labeling_job_field_headers(): - client = JobServiceClient(credentials=credentials.AnonymousCredentials(),) + client = JobServiceClient( + credentials=credentials.AnonymousCredentials(), + ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. @@ -2048,12 +2246,17 @@ def test_get_data_labeling_job_field_headers(): # Establish that the field header was sent. 
_, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "name=name/value",) in kw["metadata"] + assert ( + "x-goog-request-params", + "name=name/value", + ) in kw["metadata"] @pytest.mark.asyncio async def test_get_data_labeling_job_field_headers_async(): - client = JobServiceAsyncClient(credentials=credentials.AnonymousCredentials(),) + client = JobServiceAsyncClient( + credentials=credentials.AnonymousCredentials(), + ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. @@ -2077,11 +2280,16 @@ async def test_get_data_labeling_job_field_headers_async(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "name=name/value",) in kw["metadata"] + assert ( + "x-goog-request-params", + "name=name/value", + ) in kw["metadata"] def test_get_data_labeling_job_flattened(): - client = JobServiceClient(credentials=credentials.AnonymousCredentials(),) + client = JobServiceClient( + credentials=credentials.AnonymousCredentials(), + ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -2092,7 +2300,9 @@ def test_get_data_labeling_job_flattened(): # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - client.get_data_labeling_job(name="name_value",) + client.get_data_labeling_job( + name="name_value", + ) # Establish that the underlying call was made with the expected # request object values. @@ -2103,19 +2313,24 @@ def test_get_data_labeling_job_flattened(): def test_get_data_labeling_job_flattened_error(): - client = JobServiceClient(credentials=credentials.AnonymousCredentials(),) + client = JobServiceClient( + credentials=credentials.AnonymousCredentials(), + ) # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): client.get_data_labeling_job( - job_service.GetDataLabelingJobRequest(), name="name_value", + job_service.GetDataLabelingJobRequest(), + name="name_value", ) @pytest.mark.asyncio async def test_get_data_labeling_job_flattened_async(): - client = JobServiceAsyncClient(credentials=credentials.AnonymousCredentials(),) + client = JobServiceAsyncClient( + credentials=credentials.AnonymousCredentials(), + ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -2129,7 +2344,9 @@ async def test_get_data_labeling_job_flattened_async(): ) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - response = await client.get_data_labeling_job(name="name_value",) + response = await client.get_data_labeling_job( + name="name_value", + ) # Establish that the underlying call was made with the expected # request object values. @@ -2141,13 +2358,16 @@ async def test_get_data_labeling_job_flattened_async(): @pytest.mark.asyncio async def test_get_data_labeling_job_flattened_error_async(): - client = JobServiceAsyncClient(credentials=credentials.AnonymousCredentials(),) + client = JobServiceAsyncClient( + credentials=credentials.AnonymousCredentials(), + ) # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): await client.get_data_labeling_job( - job_service.GetDataLabelingJobRequest(), name="name_value", + job_service.GetDataLabelingJobRequest(), + name="name_value", ) @@ -2155,7 +2375,8 @@ def test_list_data_labeling_jobs( transport: str = "grpc", request_type=job_service.ListDataLabelingJobsRequest ): client = JobServiceClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=credentials.AnonymousCredentials(), + transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -2196,7 +2417,8 @@ async def test_list_data_labeling_jobs_async( request_type=job_service.ListDataLabelingJobsRequest, ): client = JobServiceAsyncClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=credentials.AnonymousCredentials(), + transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -2234,7 +2456,9 @@ async def test_list_data_labeling_jobs_async_from_dict(): def test_list_data_labeling_jobs_field_headers(): - client = JobServiceClient(credentials=credentials.AnonymousCredentials(),) + client = JobServiceClient( + credentials=credentials.AnonymousCredentials(), + ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. @@ -2256,12 +2480,17 @@ def test_list_data_labeling_jobs_field_headers(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "parent=parent/value",) in kw["metadata"] + assert ( + "x-goog-request-params", + "parent=parent/value", + ) in kw["metadata"] @pytest.mark.asyncio async def test_list_data_labeling_jobs_field_headers_async(): - client = JobServiceAsyncClient(credentials=credentials.AnonymousCredentials(),) + client = JobServiceAsyncClient( + credentials=credentials.AnonymousCredentials(), + ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. @@ -2285,11 +2514,16 @@ async def test_list_data_labeling_jobs_field_headers_async(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "parent=parent/value",) in kw["metadata"] + assert ( + "x-goog-request-params", + "parent=parent/value", + ) in kw["metadata"] def test_list_data_labeling_jobs_flattened(): - client = JobServiceClient(credentials=credentials.AnonymousCredentials(),) + client = JobServiceClient( + credentials=credentials.AnonymousCredentials(), + ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -2300,7 +2534,9 @@ def test_list_data_labeling_jobs_flattened(): # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - client.list_data_labeling_jobs(parent="parent_value",) + client.list_data_labeling_jobs( + parent="parent_value", + ) # Establish that the underlying call was made with the expected # request object values. @@ -2311,19 +2547,24 @@ def test_list_data_labeling_jobs_flattened(): def test_list_data_labeling_jobs_flattened_error(): - client = JobServiceClient(credentials=credentials.AnonymousCredentials(),) + client = JobServiceClient( + credentials=credentials.AnonymousCredentials(), + ) # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): client.list_data_labeling_jobs( - job_service.ListDataLabelingJobsRequest(), parent="parent_value", + job_service.ListDataLabelingJobsRequest(), + parent="parent_value", ) @pytest.mark.asyncio async def test_list_data_labeling_jobs_flattened_async(): - client = JobServiceAsyncClient(credentials=credentials.AnonymousCredentials(),) + client = JobServiceAsyncClient( + credentials=credentials.AnonymousCredentials(), + ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -2337,7 +2578,9 @@ async def test_list_data_labeling_jobs_flattened_async(): ) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - response = await client.list_data_labeling_jobs(parent="parent_value",) + response = await client.list_data_labeling_jobs( + parent="parent_value", + ) # Establish that the underlying call was made with the expected # request object values. @@ -2349,18 +2592,23 @@ async def test_list_data_labeling_jobs_flattened_async(): @pytest.mark.asyncio async def test_list_data_labeling_jobs_flattened_error_async(): - client = JobServiceAsyncClient(credentials=credentials.AnonymousCredentials(),) + client = JobServiceAsyncClient( + credentials=credentials.AnonymousCredentials(), + ) # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): await client.list_data_labeling_jobs( - job_service.ListDataLabelingJobsRequest(), parent="parent_value", + job_service.ListDataLabelingJobsRequest(), + parent="parent_value", ) def test_list_data_labeling_jobs_pager(): - client = JobServiceClient(credentials=credentials.AnonymousCredentials,) + client = JobServiceClient( + credentials=credentials.AnonymousCredentials, + ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -2377,10 +2625,13 @@ def test_list_data_labeling_jobs_pager(): next_page_token="abc", ), job_service.ListDataLabelingJobsResponse( - data_labeling_jobs=[], next_page_token="def", + data_labeling_jobs=[], + next_page_token="def", ), job_service.ListDataLabelingJobsResponse( - data_labeling_jobs=[data_labeling_job.DataLabelingJob(),], + data_labeling_jobs=[ + data_labeling_job.DataLabelingJob(), + ], next_page_token="ghi", ), job_service.ListDataLabelingJobsResponse( @@ -2406,7 +2657,9 @@ def test_list_data_labeling_jobs_pager(): def test_list_data_labeling_jobs_pages(): - client = JobServiceClient(credentials=credentials.AnonymousCredentials,) + client = JobServiceClient( + credentials=credentials.AnonymousCredentials, + ) # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object( @@ -2423,10 +2676,13 @@ def test_list_data_labeling_jobs_pages(): next_page_token="abc", ), job_service.ListDataLabelingJobsResponse( - data_labeling_jobs=[], next_page_token="def", + data_labeling_jobs=[], + next_page_token="def", ), job_service.ListDataLabelingJobsResponse( - data_labeling_jobs=[data_labeling_job.DataLabelingJob(),], + data_labeling_jobs=[ + data_labeling_job.DataLabelingJob(), + ], next_page_token="ghi", ), job_service.ListDataLabelingJobsResponse( @@ -2444,7 +2700,9 @@ def test_list_data_labeling_jobs_pages(): @pytest.mark.asyncio async def test_list_data_labeling_jobs_async_pager(): - client = JobServiceAsyncClient(credentials=credentials.AnonymousCredentials,) + client = JobServiceAsyncClient( + credentials=credentials.AnonymousCredentials, + ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -2463,10 +2721,13 @@ async def test_list_data_labeling_jobs_async_pager(): next_page_token="abc", ), job_service.ListDataLabelingJobsResponse( - data_labeling_jobs=[], next_page_token="def", + data_labeling_jobs=[], + next_page_token="def", ), job_service.ListDataLabelingJobsResponse( - data_labeling_jobs=[data_labeling_job.DataLabelingJob(),], + data_labeling_jobs=[ + data_labeling_job.DataLabelingJob(), + ], next_page_token="ghi", ), job_service.ListDataLabelingJobsResponse( @@ -2477,7 +2738,9 @@ async def test_list_data_labeling_jobs_async_pager(): ), RuntimeError, ) - async_pager = await client.list_data_labeling_jobs(request={},) + async_pager = await client.list_data_labeling_jobs( + request={}, + ) assert async_pager.next_page_token == "abc" responses = [] async for response in async_pager: @@ -2489,7 +2752,9 @@ async def test_list_data_labeling_jobs_async_pager(): @pytest.mark.asyncio async def test_list_data_labeling_jobs_async_pages(): - client = JobServiceAsyncClient(credentials=credentials.AnonymousCredentials,) + client = JobServiceAsyncClient( + credentials=credentials.AnonymousCredentials, + ) # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object( @@ -2508,10 +2773,13 @@ async def test_list_data_labeling_jobs_async_pages(): next_page_token="abc", ), job_service.ListDataLabelingJobsResponse( - data_labeling_jobs=[], next_page_token="def", + data_labeling_jobs=[], + next_page_token="def", ), job_service.ListDataLabelingJobsResponse( - data_labeling_jobs=[data_labeling_job.DataLabelingJob(),], + data_labeling_jobs=[ + data_labeling_job.DataLabelingJob(), + ], next_page_token="ghi", ), job_service.ListDataLabelingJobsResponse( @@ -2533,7 +2801,8 @@ def test_delete_data_labeling_job( transport: str = "grpc", request_type=job_service.DeleteDataLabelingJobRequest ): client = JobServiceClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=credentials.AnonymousCredentials(), + transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -2569,7 +2838,8 @@ async def test_delete_data_labeling_job_async( request_type=job_service.DeleteDataLabelingJobRequest, ): client = JobServiceAsyncClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=credentials.AnonymousCredentials(), + transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -2603,7 +2873,9 @@ async def test_delete_data_labeling_job_async_from_dict(): def test_delete_data_labeling_job_field_headers(): - client = JobServiceClient(credentials=credentials.AnonymousCredentials(),) + client = JobServiceClient( + credentials=credentials.AnonymousCredentials(), + ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. @@ -2625,12 +2897,17 @@ def test_delete_data_labeling_job_field_headers(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "name=name/value",) in kw["metadata"] + assert ( + "x-goog-request-params", + "name=name/value", + ) in kw["metadata"] @pytest.mark.asyncio async def test_delete_data_labeling_job_field_headers_async(): - client = JobServiceAsyncClient(credentials=credentials.AnonymousCredentials(),) + client = JobServiceAsyncClient( + credentials=credentials.AnonymousCredentials(), + ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. @@ -2654,11 +2931,16 @@ async def test_delete_data_labeling_job_field_headers_async(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "name=name/value",) in kw["metadata"] + assert ( + "x-goog-request-params", + "name=name/value", + ) in kw["metadata"] def test_delete_data_labeling_job_flattened(): - client = JobServiceClient(credentials=credentials.AnonymousCredentials(),) + client = JobServiceClient( + credentials=credentials.AnonymousCredentials(), + ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -2669,7 +2951,9 @@ def test_delete_data_labeling_job_flattened(): # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - client.delete_data_labeling_job(name="name_value",) + client.delete_data_labeling_job( + name="name_value", + ) # Establish that the underlying call was made with the expected # request object values. 
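Every pager test above follows the same recipe: queue a handful of canned response pages on the mocked gRPC call, append a RuntimeError sentinel so an unexpected extra page fetch fails loudly, then assert the pager walks the pages in order. A minimal, self-contained sketch of that recipe, using toy FakePage/collect stand-ins rather than the generated types:

    from unittest import mock


    class FakePage:
        """Toy stand-in for a List*Response message."""

        def __init__(self, items, next_page_token=""):
            self.items = items
            self.next_page_token = next_page_token


    def collect(rpc):
        # Keep fetching pages until the server returns an empty token.
        items, token = [], None
        while token != "":
            page = rpc()
            items.extend(page.items)
            token = page.next_page_token
        return items


    rpc = mock.Mock()
    rpc.side_effect = (
        FakePage([1, 2], next_page_token="abc"),
        FakePage([], next_page_token="def"),
        FakePage([3], next_page_token=""),
        RuntimeError,  # sentinel: a fourth fetch is a bug in the pager
    )

    assert collect(rpc) == [1, 2, 3]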
@@ -2680,19 +2964,24 @@ def test_delete_data_labeling_job_flattened(): def test_delete_data_labeling_job_flattened_error(): - client = JobServiceClient(credentials=credentials.AnonymousCredentials(),) + client = JobServiceClient( + credentials=credentials.AnonymousCredentials(), + ) # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): client.delete_data_labeling_job( - job_service.DeleteDataLabelingJobRequest(), name="name_value", + job_service.DeleteDataLabelingJobRequest(), + name="name_value", ) @pytest.mark.asyncio async def test_delete_data_labeling_job_flattened_async(): - client = JobServiceAsyncClient(credentials=credentials.AnonymousCredentials(),) + client = JobServiceAsyncClient( + credentials=credentials.AnonymousCredentials(), + ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -2706,7 +2995,9 @@ async def test_delete_data_labeling_job_flattened_async(): ) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - response = await client.delete_data_labeling_job(name="name_value",) + response = await client.delete_data_labeling_job( + name="name_value", + ) # Establish that the underlying call was made with the expected # request object values. @@ -2718,13 +3009,16 @@ async def test_delete_data_labeling_job_flattened_async(): @pytest.mark.asyncio async def test_delete_data_labeling_job_flattened_error_async(): - client = JobServiceAsyncClient(credentials=credentials.AnonymousCredentials(),) + client = JobServiceAsyncClient( + credentials=credentials.AnonymousCredentials(), + ) # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): await client.delete_data_labeling_job( - job_service.DeleteDataLabelingJobRequest(), name="name_value", + job_service.DeleteDataLabelingJobRequest(), + name="name_value", ) @@ -2732,7 +3026,8 @@ def test_cancel_data_labeling_job( transport: str = "grpc", request_type=job_service.CancelDataLabelingJobRequest ): client = JobServiceClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=credentials.AnonymousCredentials(), + transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -2768,7 +3063,8 @@ async def test_cancel_data_labeling_job_async( request_type=job_service.CancelDataLabelingJobRequest, ): client = JobServiceAsyncClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=credentials.AnonymousCredentials(), + transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -2800,7 +3096,9 @@ async def test_cancel_data_labeling_job_async_from_dict(): def test_cancel_data_labeling_job_field_headers(): - client = JobServiceClient(credentials=credentials.AnonymousCredentials(),) + client = JobServiceClient( + credentials=credentials.AnonymousCredentials(), + ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. @@ -2822,12 +3120,17 @@ def test_cancel_data_labeling_job_field_headers(): # Establish that the field header was sent. 
_, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "name=name/value",) in kw["metadata"] + assert ( + "x-goog-request-params", + "name=name/value", + ) in kw["metadata"] @pytest.mark.asyncio async def test_cancel_data_labeling_job_field_headers_async(): - client = JobServiceAsyncClient(credentials=credentials.AnonymousCredentials(),) + client = JobServiceAsyncClient( + credentials=credentials.AnonymousCredentials(), + ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. @@ -2849,11 +3152,16 @@ async def test_cancel_data_labeling_job_field_headers_async(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "name=name/value",) in kw["metadata"] + assert ( + "x-goog-request-params", + "name=name/value", + ) in kw["metadata"] def test_cancel_data_labeling_job_flattened(): - client = JobServiceClient(credentials=credentials.AnonymousCredentials(),) + client = JobServiceClient( + credentials=credentials.AnonymousCredentials(), + ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -2864,7 +3172,9 @@ def test_cancel_data_labeling_job_flattened(): # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - client.cancel_data_labeling_job(name="name_value",) + client.cancel_data_labeling_job( + name="name_value", + ) # Establish that the underlying call was made with the expected # request object values. @@ -2875,19 +3185,24 @@ def test_cancel_data_labeling_job_flattened(): def test_cancel_data_labeling_job_flattened_error(): - client = JobServiceClient(credentials=credentials.AnonymousCredentials(),) + client = JobServiceClient( + credentials=credentials.AnonymousCredentials(), + ) # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): client.cancel_data_labeling_job( - job_service.CancelDataLabelingJobRequest(), name="name_value", + job_service.CancelDataLabelingJobRequest(), + name="name_value", ) @pytest.mark.asyncio async def test_cancel_data_labeling_job_flattened_async(): - client = JobServiceAsyncClient(credentials=credentials.AnonymousCredentials(),) + client = JobServiceAsyncClient( + credentials=credentials.AnonymousCredentials(), + ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -2899,7 +3214,9 @@ async def test_cancel_data_labeling_job_flattened_async(): call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - response = await client.cancel_data_labeling_job(name="name_value",) + response = await client.cancel_data_labeling_job( + name="name_value", + ) # Establish that the underlying call was made with the expected # request object values. @@ -2911,13 +3228,16 @@ async def test_cancel_data_labeling_job_flattened_async(): @pytest.mark.asyncio async def test_cancel_data_labeling_job_flattened_error_async(): - client = JobServiceAsyncClient(credentials=credentials.AnonymousCredentials(),) + client = JobServiceAsyncClient( + credentials=credentials.AnonymousCredentials(), + ) # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): await client.cancel_data_labeling_job( - job_service.CancelDataLabelingJobRequest(), name="name_value", + job_service.CancelDataLabelingJobRequest(), + name="name_value", ) @@ -2926,7 +3246,8 @@ def test_create_hyperparameter_tuning_job( request_type=job_service.CreateHyperparameterTuningJobRequest, ): client = JobServiceClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=credentials.AnonymousCredentials(), + transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -2982,7 +3303,8 @@ async def test_create_hyperparameter_tuning_job_async( request_type=job_service.CreateHyperparameterTuningJobRequest, ): client = JobServiceAsyncClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=credentials.AnonymousCredentials(), + transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -3035,7 +3357,9 @@ async def test_create_hyperparameter_tuning_job_async_from_dict(): def test_create_hyperparameter_tuning_job_field_headers(): - client = JobServiceClient(credentials=credentials.AnonymousCredentials(),) + client = JobServiceClient( + credentials=credentials.AnonymousCredentials(), + ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. @@ -3057,12 +3381,17 @@ def test_create_hyperparameter_tuning_job_field_headers(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "parent=parent/value",) in kw["metadata"] + assert ( + "x-goog-request-params", + "parent=parent/value", + ) in kw["metadata"] @pytest.mark.asyncio async def test_create_hyperparameter_tuning_job_field_headers_async(): - client = JobServiceAsyncClient(credentials=credentials.AnonymousCredentials(),) + client = JobServiceAsyncClient( + credentials=credentials.AnonymousCredentials(), + ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. @@ -3086,11 +3415,16 @@ async def test_create_hyperparameter_tuning_job_field_headers_async(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "parent=parent/value",) in kw["metadata"] + assert ( + "x-goog-request-params", + "parent=parent/value", + ) in kw["metadata"] def test_create_hyperparameter_tuning_job_flattened(): - client = JobServiceClient(credentials=credentials.AnonymousCredentials(),) + client = JobServiceClient( + credentials=credentials.AnonymousCredentials(), + ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -3123,7 +3457,9 @@ def test_create_hyperparameter_tuning_job_flattened(): def test_create_hyperparameter_tuning_job_flattened_error(): - client = JobServiceClient(credentials=credentials.AnonymousCredentials(),) + client = JobServiceClient( + credentials=credentials.AnonymousCredentials(), + ) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -3139,7 +3475,9 @@ def test_create_hyperparameter_tuning_job_flattened_error(): @pytest.mark.asyncio async def test_create_hyperparameter_tuning_job_flattened_async(): - client = JobServiceAsyncClient(credentials=credentials.AnonymousCredentials(),) + client = JobServiceAsyncClient( + credentials=credentials.AnonymousCredentials(), + ) # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object( @@ -3176,7 +3514,9 @@ async def test_create_hyperparameter_tuning_job_flattened_async(): @pytest.mark.asyncio async def test_create_hyperparameter_tuning_job_flattened_error_async(): - client = JobServiceAsyncClient(credentials=credentials.AnonymousCredentials(),) + client = JobServiceAsyncClient( + credentials=credentials.AnonymousCredentials(), + ) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -3194,7 +3534,8 @@ def test_get_hyperparameter_tuning_job( transport: str = "grpc", request_type=job_service.GetHyperparameterTuningJobRequest ): client = JobServiceClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=credentials.AnonymousCredentials(), + transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -3250,7 +3591,8 @@ async def test_get_hyperparameter_tuning_job_async( request_type=job_service.GetHyperparameterTuningJobRequest, ): client = JobServiceAsyncClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=credentials.AnonymousCredentials(), + transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -3303,7 +3645,9 @@ async def test_get_hyperparameter_tuning_job_async_from_dict(): def test_get_hyperparameter_tuning_job_field_headers(): - client = JobServiceClient(credentials=credentials.AnonymousCredentials(),) + client = JobServiceClient( + credentials=credentials.AnonymousCredentials(), + ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. @@ -3325,12 +3669,17 @@ def test_get_hyperparameter_tuning_job_field_headers(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "name=name/value",) in kw["metadata"] + assert ( + "x-goog-request-params", + "name=name/value", + ) in kw["metadata"] @pytest.mark.asyncio async def test_get_hyperparameter_tuning_job_field_headers_async(): - client = JobServiceAsyncClient(credentials=credentials.AnonymousCredentials(),) + client = JobServiceAsyncClient( + credentials=credentials.AnonymousCredentials(), + ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. @@ -3354,11 +3703,16 @@ async def test_get_hyperparameter_tuning_job_field_headers_async(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "name=name/value",) in kw["metadata"] + assert ( + "x-goog-request-params", + "name=name/value", + ) in kw["metadata"] def test_get_hyperparameter_tuning_job_flattened(): - client = JobServiceClient(credentials=credentials.AnonymousCredentials(),) + client = JobServiceClient( + credentials=credentials.AnonymousCredentials(), + ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -3369,7 +3723,9 @@ def test_get_hyperparameter_tuning_job_flattened(): # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - client.get_hyperparameter_tuning_job(name="name_value",) + client.get_hyperparameter_tuning_job( + name="name_value", + ) # Establish that the underlying call was made with the expected # request object values. 
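The `_, _, kw = call.mock_calls[0]` idiom the field-header assertions above rely on works because unittest.mock records every invocation as a (name, args, kwargs) triple; unpacking the first entry exposes the keyword arguments the stubbed RPC was invoked with. A runnable illustration (the metadata value mirrors the tests above):

    from unittest import mock

    rpc = mock.Mock()
    rpc(request=object(), metadata=[("x-goog-request-params", "name=name/value")])

    # mock.Mock records each call as a (name, args, kwargs) triple.
    _, _, kw = rpc.mock_calls[0]
    assert ("x-goog-request-params", "name=name/value") in kw["metadata"]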
@@ -3380,19 +3736,24 @@ def test_get_hyperparameter_tuning_job_flattened(): def test_get_hyperparameter_tuning_job_flattened_error(): - client = JobServiceClient(credentials=credentials.AnonymousCredentials(),) + client = JobServiceClient( + credentials=credentials.AnonymousCredentials(), + ) # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): client.get_hyperparameter_tuning_job( - job_service.GetHyperparameterTuningJobRequest(), name="name_value", + job_service.GetHyperparameterTuningJobRequest(), + name="name_value", ) @pytest.mark.asyncio async def test_get_hyperparameter_tuning_job_flattened_async(): - client = JobServiceAsyncClient(credentials=credentials.AnonymousCredentials(),) + client = JobServiceAsyncClient( + credentials=credentials.AnonymousCredentials(), + ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -3406,7 +3767,9 @@ async def test_get_hyperparameter_tuning_job_flattened_async(): ) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - response = await client.get_hyperparameter_tuning_job(name="name_value",) + response = await client.get_hyperparameter_tuning_job( + name="name_value", + ) # Establish that the underlying call was made with the expected # request object values. @@ -3418,13 +3781,16 @@ async def test_get_hyperparameter_tuning_job_flattened_async(): @pytest.mark.asyncio async def test_get_hyperparameter_tuning_job_flattened_error_async(): - client = JobServiceAsyncClient(credentials=credentials.AnonymousCredentials(),) + client = JobServiceAsyncClient( + credentials=credentials.AnonymousCredentials(), + ) # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): await client.get_hyperparameter_tuning_job( - job_service.GetHyperparameterTuningJobRequest(), name="name_value", + job_service.GetHyperparameterTuningJobRequest(), + name="name_value", ) @@ -3433,7 +3799,8 @@ def test_list_hyperparameter_tuning_jobs( request_type=job_service.ListHyperparameterTuningJobsRequest, ): client = JobServiceClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=credentials.AnonymousCredentials(), + transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -3474,7 +3841,8 @@ async def test_list_hyperparameter_tuning_jobs_async( request_type=job_service.ListHyperparameterTuningJobsRequest, ): client = JobServiceAsyncClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=credentials.AnonymousCredentials(), + transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -3512,7 +3880,9 @@ async def test_list_hyperparameter_tuning_jobs_async_from_dict(): def test_list_hyperparameter_tuning_jobs_field_headers(): - client = JobServiceClient(credentials=credentials.AnonymousCredentials(),) + client = JobServiceClient( + credentials=credentials.AnonymousCredentials(), + ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. @@ -3534,12 +3904,17 @@ def test_list_hyperparameter_tuning_jobs_field_headers(): # Establish that the field header was sent. 
_, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "parent=parent/value",) in kw["metadata"] + assert ( + "x-goog-request-params", + "parent=parent/value", + ) in kw["metadata"] @pytest.mark.asyncio async def test_list_hyperparameter_tuning_jobs_field_headers_async(): - client = JobServiceAsyncClient(credentials=credentials.AnonymousCredentials(),) + client = JobServiceAsyncClient( + credentials=credentials.AnonymousCredentials(), + ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. @@ -3563,11 +3938,16 @@ async def test_list_hyperparameter_tuning_jobs_field_headers_async(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "parent=parent/value",) in kw["metadata"] + assert ( + "x-goog-request-params", + "parent=parent/value", + ) in kw["metadata"] def test_list_hyperparameter_tuning_jobs_flattened(): - client = JobServiceClient(credentials=credentials.AnonymousCredentials(),) + client = JobServiceClient( + credentials=credentials.AnonymousCredentials(), + ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -3578,7 +3958,9 @@ def test_list_hyperparameter_tuning_jobs_flattened(): # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - client.list_hyperparameter_tuning_jobs(parent="parent_value",) + client.list_hyperparameter_tuning_jobs( + parent="parent_value", + ) # Establish that the underlying call was made with the expected # request object values. @@ -3589,19 +3971,24 @@ def test_list_hyperparameter_tuning_jobs_flattened(): def test_list_hyperparameter_tuning_jobs_flattened_error(): - client = JobServiceClient(credentials=credentials.AnonymousCredentials(),) + client = JobServiceClient( + credentials=credentials.AnonymousCredentials(), + ) # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): client.list_hyperparameter_tuning_jobs( - job_service.ListHyperparameterTuningJobsRequest(), parent="parent_value", + job_service.ListHyperparameterTuningJobsRequest(), + parent="parent_value", ) @pytest.mark.asyncio async def test_list_hyperparameter_tuning_jobs_flattened_async(): - client = JobServiceAsyncClient(credentials=credentials.AnonymousCredentials(),) + client = JobServiceAsyncClient( + credentials=credentials.AnonymousCredentials(), + ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -3615,7 +4002,9 @@ async def test_list_hyperparameter_tuning_jobs_flattened_async(): ) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - response = await client.list_hyperparameter_tuning_jobs(parent="parent_value",) + response = await client.list_hyperparameter_tuning_jobs( + parent="parent_value", + ) # Establish that the underlying call was made with the expected # request object values. @@ -3627,18 +4016,23 @@ async def test_list_hyperparameter_tuning_jobs_flattened_async(): @pytest.mark.asyncio async def test_list_hyperparameter_tuning_jobs_flattened_error_async(): - client = JobServiceAsyncClient(credentials=credentials.AnonymousCredentials(),) + client = JobServiceAsyncClient( + credentials=credentials.AnonymousCredentials(), + ) # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): await client.list_hyperparameter_tuning_jobs( - job_service.ListHyperparameterTuningJobsRequest(), parent="parent_value", + job_service.ListHyperparameterTuningJobsRequest(), + parent="parent_value", ) def test_list_hyperparameter_tuning_jobs_pager(): - client = JobServiceClient(credentials=credentials.AnonymousCredentials,) + client = JobServiceClient( + credentials=credentials.AnonymousCredentials, + ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -3655,7 +4049,8 @@ def test_list_hyperparameter_tuning_jobs_pager(): next_page_token="abc", ), job_service.ListHyperparameterTuningJobsResponse( - hyperparameter_tuning_jobs=[], next_page_token="def", + hyperparameter_tuning_jobs=[], + next_page_token="def", ), job_service.ListHyperparameterTuningJobsResponse( hyperparameter_tuning_jobs=[ @@ -3689,7 +4084,9 @@ def test_list_hyperparameter_tuning_jobs_pager(): def test_list_hyperparameter_tuning_jobs_pages(): - client = JobServiceClient(credentials=credentials.AnonymousCredentials,) + client = JobServiceClient( + credentials=credentials.AnonymousCredentials, + ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -3706,7 +4103,8 @@ def test_list_hyperparameter_tuning_jobs_pages(): next_page_token="abc", ), job_service.ListHyperparameterTuningJobsResponse( - hyperparameter_tuning_jobs=[], next_page_token="def", + hyperparameter_tuning_jobs=[], + next_page_token="def", ), job_service.ListHyperparameterTuningJobsResponse( hyperparameter_tuning_jobs=[ @@ -3729,7 +4127,9 @@ def test_list_hyperparameter_tuning_jobs_pages(): @pytest.mark.asyncio async def test_list_hyperparameter_tuning_jobs_async_pager(): - client = JobServiceAsyncClient(credentials=credentials.AnonymousCredentials,) + client = JobServiceAsyncClient( + credentials=credentials.AnonymousCredentials, + ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -3748,7 +4148,8 @@ async def test_list_hyperparameter_tuning_jobs_async_pager(): next_page_token="abc", ), job_service.ListHyperparameterTuningJobsResponse( - hyperparameter_tuning_jobs=[], next_page_token="def", + hyperparameter_tuning_jobs=[], + next_page_token="def", ), job_service.ListHyperparameterTuningJobsResponse( hyperparameter_tuning_jobs=[ @@ -3764,7 +4165,9 @@ async def test_list_hyperparameter_tuning_jobs_async_pager(): ), RuntimeError, ) - async_pager = await client.list_hyperparameter_tuning_jobs(request={},) + async_pager = await client.list_hyperparameter_tuning_jobs( + request={}, + ) assert async_pager.next_page_token == "abc" responses = [] async for response in async_pager: @@ -3779,7 +4182,9 @@ async def test_list_hyperparameter_tuning_jobs_async_pager(): @pytest.mark.asyncio async def test_list_hyperparameter_tuning_jobs_async_pages(): - client = JobServiceAsyncClient(credentials=credentials.AnonymousCredentials,) + client = JobServiceAsyncClient( + credentials=credentials.AnonymousCredentials, + ) # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object( @@ -3798,7 +4203,8 @@ async def test_list_hyperparameter_tuning_jobs_async_pages(): next_page_token="abc", ), job_service.ListHyperparameterTuningJobsResponse( - hyperparameter_tuning_jobs=[], next_page_token="def", + hyperparameter_tuning_jobs=[], + next_page_token="def", ), job_service.ListHyperparameterTuningJobsResponse( hyperparameter_tuning_jobs=[ @@ -3828,7 +4234,8 @@ def test_delete_hyperparameter_tuning_job( request_type=job_service.DeleteHyperparameterTuningJobRequest, ): client = JobServiceClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=credentials.AnonymousCredentials(), + transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -3864,7 +4271,8 @@ async def test_delete_hyperparameter_tuning_job_async( request_type=job_service.DeleteHyperparameterTuningJobRequest, ): client = JobServiceAsyncClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=credentials.AnonymousCredentials(), + transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -3898,7 +4306,9 @@ async def test_delete_hyperparameter_tuning_job_async_from_dict(): def test_delete_hyperparameter_tuning_job_field_headers(): - client = JobServiceClient(credentials=credentials.AnonymousCredentials(),) + client = JobServiceClient( + credentials=credentials.AnonymousCredentials(), + ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. @@ -3920,12 +4330,17 @@ def test_delete_hyperparameter_tuning_job_field_headers(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "name=name/value",) in kw["metadata"] + assert ( + "x-goog-request-params", + "name=name/value", + ) in kw["metadata"] @pytest.mark.asyncio async def test_delete_hyperparameter_tuning_job_field_headers_async(): - client = JobServiceAsyncClient(credentials=credentials.AnonymousCredentials(),) + client = JobServiceAsyncClient( + credentials=credentials.AnonymousCredentials(), + ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. @@ -3949,11 +4364,16 @@ async def test_delete_hyperparameter_tuning_job_field_headers_async(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "name=name/value",) in kw["metadata"] + assert ( + "x-goog-request-params", + "name=name/value", + ) in kw["metadata"] def test_delete_hyperparameter_tuning_job_flattened(): - client = JobServiceClient(credentials=credentials.AnonymousCredentials(),) + client = JobServiceClient( + credentials=credentials.AnonymousCredentials(), + ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -3964,7 +4384,9 @@ def test_delete_hyperparameter_tuning_job_flattened(): # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - client.delete_hyperparameter_tuning_job(name="name_value",) + client.delete_hyperparameter_tuning_job( + name="name_value", + ) # Establish that the underlying call was made with the expected # request object values. 
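The *_flattened_error tests above all pin the same client contract: a method may be given either a fully-formed request object or flattened keyword fields, but not both. A toy guard showing the shape of that check (the function and the error message are illustrative, not the generated code):

    import pytest


    def get_job(request=None, *, name=None):
        # Either a request object or flattened fields -- never both.
        if request is not None and name is not None:
            raise ValueError("request and flattened fields are mutually exclusive")
        return request if request is not None else {"name": name}


    with pytest.raises(ValueError):
        get_job({"name": "n"}, name="name_value")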
@@ -3975,19 +4397,24 @@ def test_delete_hyperparameter_tuning_job_flattened(): def test_delete_hyperparameter_tuning_job_flattened_error(): - client = JobServiceClient(credentials=credentials.AnonymousCredentials(),) + client = JobServiceClient( + credentials=credentials.AnonymousCredentials(), + ) # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): client.delete_hyperparameter_tuning_job( - job_service.DeleteHyperparameterTuningJobRequest(), name="name_value", + job_service.DeleteHyperparameterTuningJobRequest(), + name="name_value", ) @pytest.mark.asyncio async def test_delete_hyperparameter_tuning_job_flattened_async(): - client = JobServiceAsyncClient(credentials=credentials.AnonymousCredentials(),) + client = JobServiceAsyncClient( + credentials=credentials.AnonymousCredentials(), + ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -4001,7 +4428,9 @@ async def test_delete_hyperparameter_tuning_job_flattened_async(): ) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - response = await client.delete_hyperparameter_tuning_job(name="name_value",) + response = await client.delete_hyperparameter_tuning_job( + name="name_value", + ) # Establish that the underlying call was made with the expected # request object values. @@ -4013,13 +4442,16 @@ async def test_delete_hyperparameter_tuning_job_flattened_async(): @pytest.mark.asyncio async def test_delete_hyperparameter_tuning_job_flattened_error_async(): - client = JobServiceAsyncClient(credentials=credentials.AnonymousCredentials(),) + client = JobServiceAsyncClient( + credentials=credentials.AnonymousCredentials(), + ) # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): await client.delete_hyperparameter_tuning_job( - job_service.DeleteHyperparameterTuningJobRequest(), name="name_value", + job_service.DeleteHyperparameterTuningJobRequest(), + name="name_value", ) @@ -4028,7 +4460,8 @@ def test_cancel_hyperparameter_tuning_job( request_type=job_service.CancelHyperparameterTuningJobRequest, ): client = JobServiceClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=credentials.AnonymousCredentials(), + transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -4064,7 +4497,8 @@ async def test_cancel_hyperparameter_tuning_job_async( request_type=job_service.CancelHyperparameterTuningJobRequest, ): client = JobServiceAsyncClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=credentials.AnonymousCredentials(), + transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -4096,7 +4530,9 @@ async def test_cancel_hyperparameter_tuning_job_async_from_dict(): def test_cancel_hyperparameter_tuning_job_field_headers(): - client = JobServiceClient(credentials=credentials.AnonymousCredentials(),) + client = JobServiceClient( + credentials=credentials.AnonymousCredentials(), + ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. @@ -4118,12 +4554,17 @@ def test_cancel_hyperparameter_tuning_job_field_headers(): # Establish that the field header was sent. 
_, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "name=name/value",) in kw["metadata"] + assert ( + "x-goog-request-params", + "name=name/value", + ) in kw["metadata"] @pytest.mark.asyncio async def test_cancel_hyperparameter_tuning_job_field_headers_async(): - client = JobServiceAsyncClient(credentials=credentials.AnonymousCredentials(),) + client = JobServiceAsyncClient( + credentials=credentials.AnonymousCredentials(), + ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. @@ -4145,11 +4586,16 @@ async def test_cancel_hyperparameter_tuning_job_field_headers_async(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "name=name/value",) in kw["metadata"] + assert ( + "x-goog-request-params", + "name=name/value", + ) in kw["metadata"] def test_cancel_hyperparameter_tuning_job_flattened(): - client = JobServiceClient(credentials=credentials.AnonymousCredentials(),) + client = JobServiceClient( + credentials=credentials.AnonymousCredentials(), + ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -4160,7 +4606,9 @@ def test_cancel_hyperparameter_tuning_job_flattened(): # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - client.cancel_hyperparameter_tuning_job(name="name_value",) + client.cancel_hyperparameter_tuning_job( + name="name_value", + ) # Establish that the underlying call was made with the expected # request object values. @@ -4171,19 +4619,24 @@ def test_cancel_hyperparameter_tuning_job_flattened(): def test_cancel_hyperparameter_tuning_job_flattened_error(): - client = JobServiceClient(credentials=credentials.AnonymousCredentials(),) + client = JobServiceClient( + credentials=credentials.AnonymousCredentials(), + ) # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): client.cancel_hyperparameter_tuning_job( - job_service.CancelHyperparameterTuningJobRequest(), name="name_value", + job_service.CancelHyperparameterTuningJobRequest(), + name="name_value", ) @pytest.mark.asyncio async def test_cancel_hyperparameter_tuning_job_flattened_async(): - client = JobServiceAsyncClient(credentials=credentials.AnonymousCredentials(),) + client = JobServiceAsyncClient( + credentials=credentials.AnonymousCredentials(), + ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -4195,7 +4648,9 @@ async def test_cancel_hyperparameter_tuning_job_flattened_async(): call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - response = await client.cancel_hyperparameter_tuning_job(name="name_value",) + response = await client.cancel_hyperparameter_tuning_job( + name="name_value", + ) # Establish that the underlying call was made with the expected # request object values. @@ -4207,13 +4662,16 @@ async def test_cancel_hyperparameter_tuning_job_flattened_async(): @pytest.mark.asyncio async def test_cancel_hyperparameter_tuning_job_flattened_error_async(): - client = JobServiceAsyncClient(credentials=credentials.AnonymousCredentials(),) + client = JobServiceAsyncClient( + credentials=credentials.AnonymousCredentials(), + ) # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): await client.cancel_hyperparameter_tuning_job( - job_service.CancelHyperparameterTuningJobRequest(), name="name_value", + job_service.CancelHyperparameterTuningJobRequest(), + name="name_value", ) @@ -4221,7 +4679,8 @@ def test_create_batch_prediction_job( transport: str = "grpc", request_type=job_service.CreateBatchPredictionJobRequest ): client = JobServiceClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=credentials.AnonymousCredentials(), + transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -4274,7 +4733,8 @@ async def test_create_batch_prediction_job_async( request_type=job_service.CreateBatchPredictionJobRequest, ): client = JobServiceAsyncClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=credentials.AnonymousCredentials(), + transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -4324,7 +4784,9 @@ async def test_create_batch_prediction_job_async_from_dict(): def test_create_batch_prediction_job_field_headers(): - client = JobServiceClient(credentials=credentials.AnonymousCredentials(),) + client = JobServiceClient( + credentials=credentials.AnonymousCredentials(), + ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. @@ -4346,12 +4808,17 @@ def test_create_batch_prediction_job_field_headers(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "parent=parent/value",) in kw["metadata"] + assert ( + "x-goog-request-params", + "parent=parent/value", + ) in kw["metadata"] @pytest.mark.asyncio async def test_create_batch_prediction_job_field_headers_async(): - client = JobServiceAsyncClient(credentials=credentials.AnonymousCredentials(),) + client = JobServiceAsyncClient( + credentials=credentials.AnonymousCredentials(), + ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. @@ -4375,11 +4842,16 @@ async def test_create_batch_prediction_job_field_headers_async(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "parent=parent/value",) in kw["metadata"] + assert ( + "x-goog-request-params", + "parent=parent/value", + ) in kw["metadata"] def test_create_batch_prediction_job_flattened(): - client = JobServiceClient(credentials=credentials.AnonymousCredentials(),) + client = JobServiceClient( + credentials=credentials.AnonymousCredentials(), + ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -4412,7 +4884,9 @@ def test_create_batch_prediction_job_flattened(): def test_create_batch_prediction_job_flattened_error(): - client = JobServiceClient(credentials=credentials.AnonymousCredentials(),) + client = JobServiceClient( + credentials=credentials.AnonymousCredentials(), + ) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -4428,7 +4902,9 @@ def test_create_batch_prediction_job_flattened_error(): @pytest.mark.asyncio async def test_create_batch_prediction_job_flattened_async(): - client = JobServiceAsyncClient(credentials=credentials.AnonymousCredentials(),) + client = JobServiceAsyncClient( + credentials=credentials.AnonymousCredentials(), + ) # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object( @@ -4465,7 +4941,9 @@ async def test_create_batch_prediction_job_flattened_async(): @pytest.mark.asyncio async def test_create_batch_prediction_job_flattened_error_async(): - client = JobServiceAsyncClient(credentials=credentials.AnonymousCredentials(),) + client = JobServiceAsyncClient( + credentials=credentials.AnonymousCredentials(), + ) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -4483,7 +4961,8 @@ def test_get_batch_prediction_job( transport: str = "grpc", request_type=job_service.GetBatchPredictionJobRequest ): client = JobServiceClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=credentials.AnonymousCredentials(), + transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -4536,7 +5015,8 @@ async def test_get_batch_prediction_job_async( request_type=job_service.GetBatchPredictionJobRequest, ): client = JobServiceAsyncClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=credentials.AnonymousCredentials(), + transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -4586,7 +5066,9 @@ async def test_get_batch_prediction_job_async_from_dict(): def test_get_batch_prediction_job_field_headers(): - client = JobServiceClient(credentials=credentials.AnonymousCredentials(),) + client = JobServiceClient( + credentials=credentials.AnonymousCredentials(), + ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. @@ -4608,12 +5090,17 @@ def test_get_batch_prediction_job_field_headers(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "name=name/value",) in kw["metadata"] + assert ( + "x-goog-request-params", + "name=name/value", + ) in kw["metadata"] @pytest.mark.asyncio async def test_get_batch_prediction_job_field_headers_async(): - client = JobServiceAsyncClient(credentials=credentials.AnonymousCredentials(),) + client = JobServiceAsyncClient( + credentials=credentials.AnonymousCredentials(), + ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. @@ -4637,11 +5124,16 @@ async def test_get_batch_prediction_job_field_headers_async(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "name=name/value",) in kw["metadata"] + assert ( + "x-goog-request-params", + "name=name/value", + ) in kw["metadata"] def test_get_batch_prediction_job_flattened(): - client = JobServiceClient(credentials=credentials.AnonymousCredentials(),) + client = JobServiceClient( + credentials=credentials.AnonymousCredentials(), + ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -4652,7 +5144,9 @@ def test_get_batch_prediction_job_flattened(): # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - client.get_batch_prediction_job(name="name_value",) + client.get_batch_prediction_job( + name="name_value", + ) # Establish that the underlying call was made with the expected # request object values. 
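The async variants above set call.return_value to grpc_helpers_async.FakeUnaryUnaryCall(...) because the async client awaits the stubbed RPC; a bare message would fail at the `await`. The same idea with a generic awaitable stub (FakeCall here is a hypothetical stand-in, not the api_core class):

    import asyncio
    from unittest import mock


    class FakeCall:
        """Awaitable stand-in for a unary-unary gRPC call."""

        def __init__(self, response):
            self._response = response

        def __await__(self):
            async def _resolve():
                return self._response

            return _resolve().__await__()


    async def main():
        stub = mock.Mock()
        stub.return_value = FakeCall("response")
        assert await stub(request={}) == "response"  # the await now works


    asyncio.run(main())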
@@ -4663,19 +5157,24 @@ def test_get_batch_prediction_job_flattened(): def test_get_batch_prediction_job_flattened_error(): - client = JobServiceClient(credentials=credentials.AnonymousCredentials(),) + client = JobServiceClient( + credentials=credentials.AnonymousCredentials(), + ) # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): client.get_batch_prediction_job( - job_service.GetBatchPredictionJobRequest(), name="name_value", + job_service.GetBatchPredictionJobRequest(), + name="name_value", ) @pytest.mark.asyncio async def test_get_batch_prediction_job_flattened_async(): - client = JobServiceAsyncClient(credentials=credentials.AnonymousCredentials(),) + client = JobServiceAsyncClient( + credentials=credentials.AnonymousCredentials(), + ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -4689,7 +5188,9 @@ async def test_get_batch_prediction_job_flattened_async(): ) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - response = await client.get_batch_prediction_job(name="name_value",) + response = await client.get_batch_prediction_job( + name="name_value", + ) # Establish that the underlying call was made with the expected # request object values. @@ -4701,13 +5202,16 @@ async def test_get_batch_prediction_job_flattened_async(): @pytest.mark.asyncio async def test_get_batch_prediction_job_flattened_error_async(): - client = JobServiceAsyncClient(credentials=credentials.AnonymousCredentials(),) + client = JobServiceAsyncClient( + credentials=credentials.AnonymousCredentials(), + ) # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): await client.get_batch_prediction_job( - job_service.GetBatchPredictionJobRequest(), name="name_value", + job_service.GetBatchPredictionJobRequest(), + name="name_value", ) @@ -4715,7 +5219,8 @@ def test_list_batch_prediction_jobs( transport: str = "grpc", request_type=job_service.ListBatchPredictionJobsRequest ): client = JobServiceClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=credentials.AnonymousCredentials(), + transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -4756,7 +5261,8 @@ async def test_list_batch_prediction_jobs_async( request_type=job_service.ListBatchPredictionJobsRequest, ): client = JobServiceAsyncClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=credentials.AnonymousCredentials(), + transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -4794,7 +5300,9 @@ async def test_list_batch_prediction_jobs_async_from_dict(): def test_list_batch_prediction_jobs_field_headers(): - client = JobServiceClient(credentials=credentials.AnonymousCredentials(),) + client = JobServiceClient( + credentials=credentials.AnonymousCredentials(), + ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. @@ -4816,12 +5324,17 @@ def test_list_batch_prediction_jobs_field_headers(): # Establish that the field header was sent. 
_, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "parent=parent/value",) in kw["metadata"] + assert ( + "x-goog-request-params", + "parent=parent/value", + ) in kw["metadata"] @pytest.mark.asyncio async def test_list_batch_prediction_jobs_field_headers_async(): - client = JobServiceAsyncClient(credentials=credentials.AnonymousCredentials(),) + client = JobServiceAsyncClient( + credentials=credentials.AnonymousCredentials(), + ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. @@ -4845,11 +5358,16 @@ async def test_list_batch_prediction_jobs_field_headers_async(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "parent=parent/value",) in kw["metadata"] + assert ( + "x-goog-request-params", + "parent=parent/value", + ) in kw["metadata"] def test_list_batch_prediction_jobs_flattened(): - client = JobServiceClient(credentials=credentials.AnonymousCredentials(),) + client = JobServiceClient( + credentials=credentials.AnonymousCredentials(), + ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -4860,7 +5378,9 @@ def test_list_batch_prediction_jobs_flattened(): # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - client.list_batch_prediction_jobs(parent="parent_value",) + client.list_batch_prediction_jobs( + parent="parent_value", + ) # Establish that the underlying call was made with the expected # request object values. @@ -4871,19 +5391,24 @@ def test_list_batch_prediction_jobs_flattened(): def test_list_batch_prediction_jobs_flattened_error(): - client = JobServiceClient(credentials=credentials.AnonymousCredentials(),) + client = JobServiceClient( + credentials=credentials.AnonymousCredentials(), + ) # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): client.list_batch_prediction_jobs( - job_service.ListBatchPredictionJobsRequest(), parent="parent_value", + job_service.ListBatchPredictionJobsRequest(), + parent="parent_value", ) @pytest.mark.asyncio async def test_list_batch_prediction_jobs_flattened_async(): - client = JobServiceAsyncClient(credentials=credentials.AnonymousCredentials(),) + client = JobServiceAsyncClient( + credentials=credentials.AnonymousCredentials(), + ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -4897,7 +5422,9 @@ async def test_list_batch_prediction_jobs_flattened_async(): ) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - response = await client.list_batch_prediction_jobs(parent="parent_value",) + response = await client.list_batch_prediction_jobs( + parent="parent_value", + ) # Establish that the underlying call was made with the expected # request object values. @@ -4909,18 +5436,23 @@ async def test_list_batch_prediction_jobs_flattened_async(): @pytest.mark.asyncio async def test_list_batch_prediction_jobs_flattened_error_async(): - client = JobServiceAsyncClient(credentials=credentials.AnonymousCredentials(),) + client = JobServiceAsyncClient( + credentials=credentials.AnonymousCredentials(), + ) # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): await client.list_batch_prediction_jobs( - job_service.ListBatchPredictionJobsRequest(), parent="parent_value", + job_service.ListBatchPredictionJobsRequest(), + parent="parent_value", ) def test_list_batch_prediction_jobs_pager(): - client = JobServiceClient(credentials=credentials.AnonymousCredentials,) + client = JobServiceClient( + credentials=credentials.AnonymousCredentials, + ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -4937,10 +5469,13 @@ def test_list_batch_prediction_jobs_pager(): next_page_token="abc", ), job_service.ListBatchPredictionJobsResponse( - batch_prediction_jobs=[], next_page_token="def", + batch_prediction_jobs=[], + next_page_token="def", ), job_service.ListBatchPredictionJobsResponse( - batch_prediction_jobs=[batch_prediction_job.BatchPredictionJob(),], + batch_prediction_jobs=[ + batch_prediction_job.BatchPredictionJob(), + ], next_page_token="ghi", ), job_service.ListBatchPredictionJobsResponse( @@ -4968,7 +5503,9 @@ def test_list_batch_prediction_jobs_pager(): def test_list_batch_prediction_jobs_pages(): - client = JobServiceClient(credentials=credentials.AnonymousCredentials,) + client = JobServiceClient( + credentials=credentials.AnonymousCredentials, + ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -4985,10 +5522,13 @@ def test_list_batch_prediction_jobs_pages(): next_page_token="abc", ), job_service.ListBatchPredictionJobsResponse( - batch_prediction_jobs=[], next_page_token="def", + batch_prediction_jobs=[], + next_page_token="def", ), job_service.ListBatchPredictionJobsResponse( - batch_prediction_jobs=[batch_prediction_job.BatchPredictionJob(),], + batch_prediction_jobs=[ + batch_prediction_job.BatchPredictionJob(), + ], next_page_token="ghi", ), job_service.ListBatchPredictionJobsResponse( @@ -5006,7 +5546,9 @@ def test_list_batch_prediction_jobs_pages(): @pytest.mark.asyncio async def test_list_batch_prediction_jobs_async_pager(): - client = JobServiceAsyncClient(credentials=credentials.AnonymousCredentials,) + client = JobServiceAsyncClient( + credentials=credentials.AnonymousCredentials, + ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -5025,10 +5567,13 @@ async def test_list_batch_prediction_jobs_async_pager(): next_page_token="abc", ), job_service.ListBatchPredictionJobsResponse( - batch_prediction_jobs=[], next_page_token="def", + batch_prediction_jobs=[], + next_page_token="def", ), job_service.ListBatchPredictionJobsResponse( - batch_prediction_jobs=[batch_prediction_job.BatchPredictionJob(),], + batch_prediction_jobs=[ + batch_prediction_job.BatchPredictionJob(), + ], next_page_token="ghi", ), job_service.ListBatchPredictionJobsResponse( @@ -5039,7 +5584,9 @@ async def test_list_batch_prediction_jobs_async_pager(): ), RuntimeError, ) - async_pager = await client.list_batch_prediction_jobs(request={},) + async_pager = await client.list_batch_prediction_jobs( + request={}, + ) assert async_pager.next_page_token == "abc" responses = [] async for response in async_pager: @@ -5053,7 +5600,9 @@ async def test_list_batch_prediction_jobs_async_pager(): @pytest.mark.asyncio async def test_list_batch_prediction_jobs_async_pages(): - client = JobServiceAsyncClient(credentials=credentials.AnonymousCredentials,) + client = JobServiceAsyncClient( + credentials=credentials.AnonymousCredentials, + ) # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object( @@ -5072,10 +5621,13 @@ async def test_list_batch_prediction_jobs_async_pages(): next_page_token="abc", ), job_service.ListBatchPredictionJobsResponse( - batch_prediction_jobs=[], next_page_token="def", + batch_prediction_jobs=[], + next_page_token="def", ), job_service.ListBatchPredictionJobsResponse( - batch_prediction_jobs=[batch_prediction_job.BatchPredictionJob(),], + batch_prediction_jobs=[ + batch_prediction_job.BatchPredictionJob(), + ], next_page_token="ghi", ), job_service.ListBatchPredictionJobsResponse( @@ -5097,7 +5649,8 @@ def test_delete_batch_prediction_job( transport: str = "grpc", request_type=job_service.DeleteBatchPredictionJobRequest ): client = JobServiceClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=credentials.AnonymousCredentials(), + transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -5133,7 +5686,8 @@ async def test_delete_batch_prediction_job_async( request_type=job_service.DeleteBatchPredictionJobRequest, ): client = JobServiceAsyncClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=credentials.AnonymousCredentials(), + transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -5167,7 +5721,9 @@ async def test_delete_batch_prediction_job_async_from_dict(): def test_delete_batch_prediction_job_field_headers(): - client = JobServiceClient(credentials=credentials.AnonymousCredentials(),) + client = JobServiceClient( + credentials=credentials.AnonymousCredentials(), + ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. @@ -5189,12 +5745,17 @@ def test_delete_batch_prediction_job_field_headers(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "name=name/value",) in kw["metadata"] + assert ( + "x-goog-request-params", + "name=name/value", + ) in kw["metadata"] @pytest.mark.asyncio async def test_delete_batch_prediction_job_field_headers_async(): - client = JobServiceAsyncClient(credentials=credentials.AnonymousCredentials(),) + client = JobServiceAsyncClient( + credentials=credentials.AnonymousCredentials(), + ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. @@ -5218,11 +5779,16 @@ async def test_delete_batch_prediction_job_field_headers_async(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "name=name/value",) in kw["metadata"] + assert ( + "x-goog-request-params", + "name=name/value", + ) in kw["metadata"] def test_delete_batch_prediction_job_flattened(): - client = JobServiceClient(credentials=credentials.AnonymousCredentials(),) + client = JobServiceClient( + credentials=credentials.AnonymousCredentials(), + ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -5233,7 +5799,9 @@ def test_delete_batch_prediction_job_flattened(): # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - client.delete_batch_prediction_job(name="name_value",) + client.delete_batch_prediction_job( + name="name_value", + ) # Establish that the underlying call was made with the expected # request object values. 
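Nearly every hunk in this patch is the same mechanical change: black's "magic trailing comma" rule, under which a bracketed call or literal that already ends in a trailing comma is exploded to one element per line even when it would fit within the line limit. A minimal sketch of the rule in isolation (hypothetical snippet, assuming a black release that honors the magic trailing comma, i.e. 20.8b0 or later):

    # Demonstrates the rule behind the "+" lines in these hunks.
    import black

    # A one-line call that fits the line limit but ends in a trailing comma.
    src = 'client = JobServiceClient(credentials=credentials.AnonymousCredentials(),)\n'

    # The pre-existing trailing comma forces black to explode the call,
    # one argument per line, which is the shape of the added lines throughout.
    print(black.format_str(src, mode=black.FileMode()))
    # Expected output (roughly):
    # client = JobServiceClient(
    #     credentials=credentials.AnonymousCredentials(),
    # )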
@@ -5244,19 +5812,24 @@ def test_delete_batch_prediction_job_flattened(): def test_delete_batch_prediction_job_flattened_error(): - client = JobServiceClient(credentials=credentials.AnonymousCredentials(),) + client = JobServiceClient( + credentials=credentials.AnonymousCredentials(), + ) # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): client.delete_batch_prediction_job( - job_service.DeleteBatchPredictionJobRequest(), name="name_value", + job_service.DeleteBatchPredictionJobRequest(), + name="name_value", ) @pytest.mark.asyncio async def test_delete_batch_prediction_job_flattened_async(): - client = JobServiceAsyncClient(credentials=credentials.AnonymousCredentials(),) + client = JobServiceAsyncClient( + credentials=credentials.AnonymousCredentials(), + ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -5270,7 +5843,9 @@ async def test_delete_batch_prediction_job_flattened_async(): ) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - response = await client.delete_batch_prediction_job(name="name_value",) + response = await client.delete_batch_prediction_job( + name="name_value", + ) # Establish that the underlying call was made with the expected # request object values. @@ -5282,13 +5857,16 @@ async def test_delete_batch_prediction_job_flattened_async(): @pytest.mark.asyncio async def test_delete_batch_prediction_job_flattened_error_async(): - client = JobServiceAsyncClient(credentials=credentials.AnonymousCredentials(),) + client = JobServiceAsyncClient( + credentials=credentials.AnonymousCredentials(), + ) # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): await client.delete_batch_prediction_job( - job_service.DeleteBatchPredictionJobRequest(), name="name_value", + job_service.DeleteBatchPredictionJobRequest(), + name="name_value", ) @@ -5296,7 +5874,8 @@ def test_cancel_batch_prediction_job( transport: str = "grpc", request_type=job_service.CancelBatchPredictionJobRequest ): client = JobServiceClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=credentials.AnonymousCredentials(), + transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -5332,7 +5911,8 @@ async def test_cancel_batch_prediction_job_async( request_type=job_service.CancelBatchPredictionJobRequest, ): client = JobServiceAsyncClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=credentials.AnonymousCredentials(), + transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -5364,7 +5944,9 @@ async def test_cancel_batch_prediction_job_async_from_dict(): def test_cancel_batch_prediction_job_field_headers(): - client = JobServiceClient(credentials=credentials.AnonymousCredentials(),) + client = JobServiceClient( + credentials=credentials.AnonymousCredentials(), + ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. @@ -5386,12 +5968,17 @@ def test_cancel_batch_prediction_job_field_headers(): # Establish that the field header was sent. 
_, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "name=name/value",) in kw["metadata"] + assert ( + "x-goog-request-params", + "name=name/value", + ) in kw["metadata"] @pytest.mark.asyncio async def test_cancel_batch_prediction_job_field_headers_async(): - client = JobServiceAsyncClient(credentials=credentials.AnonymousCredentials(),) + client = JobServiceAsyncClient( + credentials=credentials.AnonymousCredentials(), + ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. @@ -5413,11 +6000,16 @@ async def test_cancel_batch_prediction_job_field_headers_async(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "name=name/value",) in kw["metadata"] + assert ( + "x-goog-request-params", + "name=name/value", + ) in kw["metadata"] def test_cancel_batch_prediction_job_flattened(): - client = JobServiceClient(credentials=credentials.AnonymousCredentials(),) + client = JobServiceClient( + credentials=credentials.AnonymousCredentials(), + ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -5428,7 +6020,9 @@ def test_cancel_batch_prediction_job_flattened(): # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - client.cancel_batch_prediction_job(name="name_value",) + client.cancel_batch_prediction_job( + name="name_value", + ) # Establish that the underlying call was made with the expected # request object values. @@ -5439,19 +6033,24 @@ def test_cancel_batch_prediction_job_flattened(): def test_cancel_batch_prediction_job_flattened_error(): - client = JobServiceClient(credentials=credentials.AnonymousCredentials(),) + client = JobServiceClient( + credentials=credentials.AnonymousCredentials(), + ) # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): client.cancel_batch_prediction_job( - job_service.CancelBatchPredictionJobRequest(), name="name_value", + job_service.CancelBatchPredictionJobRequest(), + name="name_value", ) @pytest.mark.asyncio async def test_cancel_batch_prediction_job_flattened_async(): - client = JobServiceAsyncClient(credentials=credentials.AnonymousCredentials(),) + client = JobServiceAsyncClient( + credentials=credentials.AnonymousCredentials(), + ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -5463,7 +6062,9 @@ async def test_cancel_batch_prediction_job_flattened_async(): call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - response = await client.cancel_batch_prediction_job(name="name_value",) + response = await client.cancel_batch_prediction_job( + name="name_value", + ) # Establish that the underlying call was made with the expected # request object values. @@ -5475,13 +6076,16 @@ async def test_cancel_batch_prediction_job_flattened_async(): @pytest.mark.asyncio async def test_cancel_batch_prediction_job_flattened_error_async(): - client = JobServiceAsyncClient(credentials=credentials.AnonymousCredentials(),) + client = JobServiceAsyncClient( + credentials=credentials.AnonymousCredentials(), + ) # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): await client.cancel_batch_prediction_job( - job_service.CancelBatchPredictionJobRequest(), name="name_value", + job_service.CancelBatchPredictionJobRequest(), + name="name_value", ) @@ -5492,7 +6096,8 @@ def test_credentials_transport_error(): ) with pytest.raises(ValueError): client = JobServiceClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=credentials.AnonymousCredentials(), + transport=transport, ) # It is an error to provide a credentials file and a transport instance. @@ -5511,7 +6116,8 @@ def test_credentials_transport_error(): ) with pytest.raises(ValueError): client = JobServiceClient( - client_options={"scopes": ["1", "2"]}, transport=transport, + client_options={"scopes": ["1", "2"]}, + transport=transport, ) @@ -5553,8 +6159,13 @@ def test_transport_adc(transport_class): def test_transport_grpc_default(): # A client should use the gRPC transport by default. - client = JobServiceClient(credentials=credentials.AnonymousCredentials(),) - assert isinstance(client.transport, transports.JobServiceGrpcTransport,) + client = JobServiceClient( + credentials=credentials.AnonymousCredentials(), + ) + assert isinstance( + client.transport, + transports.JobServiceGrpcTransport, + ) def test_job_service_base_transport_error(): @@ -5620,7 +6231,8 @@ def test_job_service_base_transport_with_credentials_file(): Transport.return_value = None load_creds.return_value = (credentials.AnonymousCredentials(), None) transport = transports.JobServiceTransport( - credentials_file="credentials.json", quota_project_id="octopus", + credentials_file="credentials.json", + quota_project_id="octopus", ) load_creds.assert_called_once_with( "credentials.json", @@ -5690,7 +6302,8 @@ def test_job_service_grpc_transport_channel(): # Check that channel is used if provided. transport = transports.JobServiceGrpcTransport( - host="squid.clam.whelk", channel=channel, + host="squid.clam.whelk", + channel=channel, ) assert transport.grpc_channel == channel assert transport._host == "squid.clam.whelk:443" @@ -5702,7 +6315,8 @@ def test_job_service_grpc_asyncio_transport_channel(): # Check that channel is used if provided. transport = transports.JobServiceGrpcAsyncIOTransport( - host="squid.clam.whelk", channel=channel, + host="squid.clam.whelk", + channel=channel, ) assert transport.grpc_channel == channel assert transport._host == "squid.clam.whelk:443" @@ -5791,12 +6405,16 @@ def test_job_service_transport_channel_mtls_with_adc(transport_class): def test_job_service_grpc_lro_client(): client = JobServiceClient( - credentials=credentials.AnonymousCredentials(), transport="grpc", + credentials=credentials.AnonymousCredentials(), + transport="grpc", ) transport = client.transport # Ensure that we have an api-core operations client. - assert isinstance(transport.operations_client, operations_v1.OperationsClient,) + assert isinstance( + transport.operations_client, + operations_v1.OperationsClient, + ) # Ensure that subsequent calls to the property send the exact same object. assert transport.operations_client is transport.operations_client @@ -5804,12 +6422,16 @@ def test_job_service_grpc_lro_async_client(): client = JobServiceAsyncClient( - credentials=credentials.AnonymousCredentials(), transport="grpc_asyncio", + credentials=credentials.AnonymousCredentials(), + transport="grpc_asyncio", ) transport = client.transport # Ensure that we have an api-core operations client.
- assert isinstance(transport.operations_client, operations_v1.OperationsAsyncClient,) + assert isinstance( + transport.operations_client, + operations_v1.OperationsAsyncClient, + ) # Ensure that subsequent calls to the property send the exact same object. assert transport.operations_client is transport.operations_client @@ -5821,7 +6443,9 @@ def test_batch_prediction_job_path(): batch_prediction_job = "whelk" expected = "projects/{project}/locations/{location}/batchPredictionJobs/{batch_prediction_job}".format( - project=project, location=location, batch_prediction_job=batch_prediction_job, + project=project, + location=location, + batch_prediction_job=batch_prediction_job, ) actual = JobServiceClient.batch_prediction_job_path( project, location, batch_prediction_job @@ -5848,7 +6472,9 @@ def test_custom_job_path(): custom_job = "winkle" expected = "projects/{project}/locations/{location}/customJobs/{custom_job}".format( - project=project, location=location, custom_job=custom_job, + project=project, + location=location, + custom_job=custom_job, ) actual = JobServiceClient.custom_job_path(project, location, custom_job) assert expected == actual @@ -5873,7 +6499,9 @@ def test_data_labeling_job_path(): data_labeling_job = "whelk" expected = "projects/{project}/locations/{location}/dataLabelingJobs/{data_labeling_job}".format( - project=project, location=location, data_labeling_job=data_labeling_job, + project=project, + location=location, + data_labeling_job=data_labeling_job, ) actual = JobServiceClient.data_labeling_job_path( project, location, data_labeling_job @@ -5900,7 +6528,9 @@ def test_dataset_path(): dataset = "winkle" expected = "projects/{project}/locations/{location}/datasets/{dataset}".format( - project=project, location=location, dataset=dataset, + project=project, + location=location, + dataset=dataset, ) actual = JobServiceClient.dataset_path(project, location, dataset) assert expected == actual @@ -5954,7 +6584,9 @@ def test_model_path(): model = "winkle" expected = "projects/{project}/locations/{location}/models/{model}".format( - project=project, location=location, model=model, + project=project, + location=location, + model=model, ) actual = JobServiceClient.model_path(project, location, model) assert expected == actual @@ -5997,7 +6629,9 @@ def test_parse_common_billing_account_path(): def test_common_folder_path(): folder = "whelk" - expected = "folders/{folder}".format(folder=folder,) + expected = "folders/{folder}".format( + folder=folder, + ) actual = JobServiceClient.common_folder_path(folder) assert expected == actual @@ -6016,7 +6650,9 @@ def test_parse_common_folder_path(): def test_common_organization_path(): organization = "oyster" - expected = "organizations/{organization}".format(organization=organization,) + expected = "organizations/{organization}".format( + organization=organization, + ) actual = JobServiceClient.common_organization_path(organization) assert expected == actual @@ -6035,7 +6671,9 @@ def test_parse_common_organization_path(): def test_common_project_path(): project = "cuttlefish" - expected = "projects/{project}".format(project=project,) + expected = "projects/{project}".format( + project=project, + ) actual = JobServiceClient.common_project_path(project) assert expected == actual @@ -6056,7 +6694,8 @@ def test_common_location_path(): location = "nautilus" expected = "projects/{project}/locations/{location}".format( - project=project, location=location, + project=project, + location=location, ) actual = 
JobServiceClient.common_location_path(project, location) assert expected == actual @@ -6081,7 +6720,8 @@ def test_client_withDEFAULT_CLIENT_INFO(): transports.JobServiceTransport, "_prep_wrapped_messages" ) as prep: client = JobServiceClient( - credentials=credentials.AnonymousCredentials(), client_info=client_info, + credentials=credentials.AnonymousCredentials(), + client_info=client_info, ) prep.assert_called_once_with(client_info) @@ -6090,6 +6730,7 @@ def test_client_withDEFAULT_CLIENT_INFO(): ) as prep: transport_class = JobServiceClient.get_transport_class() transport = transport_class( - credentials=credentials.AnonymousCredentials(), client_info=client_info, + credentials=credentials.AnonymousCredentials(), + client_info=client_info, ) prep.assert_called_once_with(client_info) diff --git a/tests/unit/gapic/aiplatform_v1beta1/test_migration_service.py b/tests/unit/gapic/aiplatform_v1beta1/test_migration_service.py index 85e6a2d362..23f17a54b6 100644 --- a/tests/unit/gapic/aiplatform_v1beta1/test_migration_service.py +++ b/tests/unit/gapic/aiplatform_v1beta1/test_migration_service.py @@ -393,7 +393,9 @@ def test_migration_service_client_client_options_scopes( client_class, transport_class, transport_name ): # Check the case scopes are provided. - options = client_options.ClientOptions(scopes=["1", "2"],) + options = client_options.ClientOptions( + scopes=["1", "2"], + ) with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None client = client_class(client_options=options) @@ -462,7 +464,8 @@ def test_search_migratable_resources( request_type=migration_service.SearchMigratableResourcesRequest, ): client = MigrationServiceClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=credentials.AnonymousCredentials(), + transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -503,7 +506,8 @@ async def test_search_migratable_resources_async( request_type=migration_service.SearchMigratableResourcesRequest, ): client = MigrationServiceAsyncClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=credentials.AnonymousCredentials(), + transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -541,7 +545,9 @@ async def test_search_migratable_resources_async_from_dict(): def test_search_migratable_resources_field_headers(): - client = MigrationServiceClient(credentials=credentials.AnonymousCredentials(),) + client = MigrationServiceClient( + credentials=credentials.AnonymousCredentials(), + ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. @@ -563,7 +569,10 @@ def test_search_migratable_resources_field_headers(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "parent=parent/value",) in kw["metadata"] + assert ( + "x-goog-request-params", + "parent=parent/value", + ) in kw["metadata"] @pytest.mark.asyncio @@ -594,11 +603,16 @@ async def test_search_migratable_resources_field_headers_async(): # Establish that the field header was sent. 
_, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "parent=parent/value",) in kw["metadata"] + assert ( + "x-goog-request-params", + "parent=parent/value", + ) in kw["metadata"] def test_search_migratable_resources_flattened(): - client = MigrationServiceClient(credentials=credentials.AnonymousCredentials(),) + client = MigrationServiceClient( + credentials=credentials.AnonymousCredentials(), + ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -609,7 +623,9 @@ def test_search_migratable_resources_flattened(): # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - client.search_migratable_resources(parent="parent_value",) + client.search_migratable_resources( + parent="parent_value", + ) # Establish that the underlying call was made with the expected # request object values. @@ -620,13 +636,16 @@ def test_search_migratable_resources_flattened(): def test_search_migratable_resources_flattened_error(): - client = MigrationServiceClient(credentials=credentials.AnonymousCredentials(),) + client = MigrationServiceClient( + credentials=credentials.AnonymousCredentials(), + ) # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): client.search_migratable_resources( - migration_service.SearchMigratableResourcesRequest(), parent="parent_value", + migration_service.SearchMigratableResourcesRequest(), + parent="parent_value", ) @@ -648,7 +667,9 @@ async def test_search_migratable_resources_flattened_async(): ) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - response = await client.search_migratable_resources(parent="parent_value",) + response = await client.search_migratable_resources( + parent="parent_value", + ) # Establish that the underlying call was made with the expected # request object values. @@ -668,12 +689,15 @@ async def test_search_migratable_resources_flattened_error_async(): # fields is an error. with pytest.raises(ValueError): await client.search_migratable_resources( - migration_service.SearchMigratableResourcesRequest(), parent="parent_value", + migration_service.SearchMigratableResourcesRequest(), + parent="parent_value", ) def test_search_migratable_resources_pager(): - client = MigrationServiceClient(credentials=credentials.AnonymousCredentials,) + client = MigrationServiceClient( + credentials=credentials.AnonymousCredentials, + ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -690,10 +714,13 @@ def test_search_migratable_resources_pager(): next_page_token="abc", ), migration_service.SearchMigratableResourcesResponse( - migratable_resources=[], next_page_token="def", + migratable_resources=[], + next_page_token="def", ), migration_service.SearchMigratableResourcesResponse( - migratable_resources=[migratable_resource.MigratableResource(),], + migratable_resources=[ + migratable_resource.MigratableResource(), + ], next_page_token="ghi", ), migration_service.SearchMigratableResourcesResponse( @@ -721,7 +748,9 @@ def test_search_migratable_resources_pager(): def test_search_migratable_resources_pages(): - client = MigrationServiceClient(credentials=credentials.AnonymousCredentials,) + client = MigrationServiceClient( + credentials=credentials.AnonymousCredentials, + ) # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object( @@ -738,10 +767,13 @@ def test_search_migratable_resources_pages(): next_page_token="abc", ), migration_service.SearchMigratableResourcesResponse( - migratable_resources=[], next_page_token="def", + migratable_resources=[], + next_page_token="def", ), migration_service.SearchMigratableResourcesResponse( - migratable_resources=[migratable_resource.MigratableResource(),], + migratable_resources=[ + migratable_resource.MigratableResource(), + ], next_page_token="ghi", ), migration_service.SearchMigratableResourcesResponse( @@ -759,7 +791,9 @@ def test_search_migratable_resources_pages(): @pytest.mark.asyncio async def test_search_migratable_resources_async_pager(): - client = MigrationServiceAsyncClient(credentials=credentials.AnonymousCredentials,) + client = MigrationServiceAsyncClient( + credentials=credentials.AnonymousCredentials, + ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -778,10 +812,13 @@ async def test_search_migratable_resources_async_pager(): next_page_token="abc", ), migration_service.SearchMigratableResourcesResponse( - migratable_resources=[], next_page_token="def", + migratable_resources=[], + next_page_token="def", ), migration_service.SearchMigratableResourcesResponse( - migratable_resources=[migratable_resource.MigratableResource(),], + migratable_resources=[ + migratable_resource.MigratableResource(), + ], next_page_token="ghi", ), migration_service.SearchMigratableResourcesResponse( @@ -792,7 +829,9 @@ async def test_search_migratable_resources_async_pager(): ), RuntimeError, ) - async_pager = await client.search_migratable_resources(request={},) + async_pager = await client.search_migratable_resources( + request={}, + ) assert async_pager.next_page_token == "abc" responses = [] async for response in async_pager: @@ -806,7 +845,9 @@ async def test_search_migratable_resources_async_pager(): @pytest.mark.asyncio async def test_search_migratable_resources_async_pages(): - client = MigrationServiceAsyncClient(credentials=credentials.AnonymousCredentials,) + client = MigrationServiceAsyncClient( + credentials=credentials.AnonymousCredentials, + ) # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object( @@ -825,10 +866,13 @@ async def test_search_migratable_resources_async_pages(): next_page_token="abc", ), migration_service.SearchMigratableResourcesResponse( - migratable_resources=[], next_page_token="def", + migratable_resources=[], + next_page_token="def", ), migration_service.SearchMigratableResourcesResponse( - migratable_resources=[migratable_resource.MigratableResource(),], + migratable_resources=[ + migratable_resource.MigratableResource(), + ], next_page_token="ghi", ), migration_service.SearchMigratableResourcesResponse( @@ -850,7 +894,8 @@ def test_batch_migrate_resources( transport: str = "grpc", request_type=migration_service.BatchMigrateResourcesRequest ): client = MigrationServiceClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=credentials.AnonymousCredentials(), + transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -886,7 +931,8 @@ async def test_batch_migrate_resources_async( request_type=migration_service.BatchMigrateResourcesRequest, ): client = MigrationServiceAsyncClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=credentials.AnonymousCredentials(), + transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -920,7 +966,9 @@ async def test_batch_migrate_resources_async_from_dict(): def test_batch_migrate_resources_field_headers(): - client = MigrationServiceClient(credentials=credentials.AnonymousCredentials(),) + client = MigrationServiceClient( + credentials=credentials.AnonymousCredentials(), + ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. @@ -942,7 +990,10 @@ def test_batch_migrate_resources_field_headers(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "parent=parent/value",) in kw["metadata"] + assert ( + "x-goog-request-params", + "parent=parent/value", + ) in kw["metadata"] @pytest.mark.asyncio @@ -973,11 +1024,16 @@ async def test_batch_migrate_resources_field_headers_async(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "parent=parent/value",) in kw["metadata"] + assert ( + "x-goog-request-params", + "parent=parent/value", + ) in kw["metadata"] def test_batch_migrate_resources_flattened(): - client = MigrationServiceClient(credentials=credentials.AnonymousCredentials(),) + client = MigrationServiceClient( + credentials=credentials.AnonymousCredentials(), + ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -1016,7 +1072,9 @@ def test_batch_migrate_resources_flattened(): def test_batch_migrate_resources_flattened_error(): - client = MigrationServiceClient(credentials=credentials.AnonymousCredentials(),) + client = MigrationServiceClient( + credentials=credentials.AnonymousCredentials(), + ) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -1108,7 +1166,8 @@ def test_credentials_transport_error(): ) with pytest.raises(ValueError): client = MigrationServiceClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=credentials.AnonymousCredentials(), + transport=transport, ) # It is an error to provide a credentials file and a transport instance. 
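The pager tests reflowed above all share one fixture pattern: the stubbed gRPC method's side_effect serves a fixed series of page responses and ends with RuntimeError, a sentinel that fires only if the pager fetches more pages than the test staged. A stripped-down sketch of that pattern using plain mocks (Page and iterate_pages are illustrative stand-ins, not the real GAPIC types):

    from unittest import mock

    class Page:
        # Stand-in for a List*Response message: items plus a next_page_token.
        def __init__(self, items, next_page_token=""):
            self.items = items
            self.next_page_token = next_page_token

    def iterate_pages(api_call):
        # Stand-in for a GAPIC pager: fetch pages until the token runs out.
        token = None
        while token != "":
            page = api_call(page_token=token)
            yield page
            token = page.next_page_token

    api_call = mock.Mock()
    api_call.side_effect = (
        Page(["a", "b"], next_page_token="abc"),
        Page([], next_page_token="def"),
        Page(["c"]),    # empty token: iteration stops after this page
        RuntimeError,   # sentinel: raised only if the pager over-fetches
    )

    pages = list(iterate_pages(api_call))
    assert [p.items for p in pages] == [["a", "b"], [], ["c"]]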
@@ -1127,7 +1186,8 @@ def test_credentials_transport_error(): ) with pytest.raises(ValueError): client = MigrationServiceClient( - client_options={"scopes": ["1", "2"]}, transport=transport, + client_options={"scopes": ["1", "2"]}, + transport=transport, ) @@ -1172,8 +1232,13 @@ def test_transport_adc(transport_class): def test_transport_grpc_default(): # A client should use the gRPC transport by default. - client = MigrationServiceClient(credentials=credentials.AnonymousCredentials(),) - assert isinstance(client.transport, transports.MigrationServiceGrpcTransport,) + client = MigrationServiceClient( + credentials=credentials.AnonymousCredentials(), + ) + assert isinstance( + client.transport, + transports.MigrationServiceGrpcTransport, + ) def test_migration_service_base_transport_error(): @@ -1221,7 +1286,8 @@ def test_migration_service_base_transport_with_credentials_file(): Transport.return_value = None load_creds.return_value = (credentials.AnonymousCredentials(), None) transport = transports.MigrationServiceTransport( - credentials_file="credentials.json", quota_project_id="octopus", + credentials_file="credentials.json", + quota_project_id="octopus", ) load_creds.assert_called_once_with( "credentials.json", @@ -1291,7 +1357,8 @@ def test_migration_service_grpc_transport_channel(): # Check that channel is used if provided. transport = transports.MigrationServiceGrpcTransport( - host="squid.clam.whelk", channel=channel, + host="squid.clam.whelk", + channel=channel, ) assert transport.grpc_channel == channel assert transport._host == "squid.clam.whelk:443" @@ -1303,7 +1370,8 @@ def test_migration_service_grpc_asyncio_transport_channel(): # Check that channel is used if provided. transport = transports.MigrationServiceGrpcAsyncIOTransport( - host="squid.clam.whelk", channel=channel, + host="squid.clam.whelk", + channel=channel, ) assert transport.grpc_channel == channel assert transport._host == "squid.clam.whelk:443" @@ -1400,12 +1468,16 @@ def test_migration_service_transport_channel_mtls_with_adc(transport_class): def test_migration_service_grpc_lro_client(): client = MigrationServiceClient( - credentials=credentials.AnonymousCredentials(), transport="grpc", + credentials=credentials.AnonymousCredentials(), + transport="grpc", ) transport = client.transport # Ensure that we have an api-core operations client. - assert isinstance(transport.operations_client, operations_v1.OperationsClient,) + assert isinstance( + transport.operations_client, + operations_v1.OperationsClient, + ) # Ensure that subsequent calls to the property send the exact same object. assert transport.operations_client is transport.operations_client @@ -1413,12 +1485,16 @@ def test_migration_service_grpc_lro_async_client(): client = MigrationServiceAsyncClient( - credentials=credentials.AnonymousCredentials(), transport="grpc_asyncio", + credentials=credentials.AnonymousCredentials(), + transport="grpc_asyncio", ) transport = client.transport # Ensure that we have an api-core operations client. - assert isinstance(transport.operations_client, operations_v1.OperationsAsyncClient,) + assert isinstance( + transport.operations_client, + operations_v1.OperationsAsyncClient, + ) # Ensure that subsequent calls to the property send the exact same object.
assert transport.operations_client is transport.operations_client @@ -1430,7 +1506,9 @@ def test_annotated_dataset_path(): annotated_dataset = "whelk" expected = "projects/{project}/datasets/{dataset}/annotatedDatasets/{annotated_dataset}".format( - project=project, dataset=dataset, annotated_dataset=annotated_dataset, + project=project, + dataset=dataset, + annotated_dataset=annotated_dataset, ) actual = MigrationServiceClient.annotated_dataset_path( project, dataset, annotated_dataset @@ -1457,7 +1535,9 @@ def test_dataset_path(): dataset = "winkle" expected = "projects/{project}/locations/{location}/datasets/{dataset}".format( - project=project, location=location, dataset=dataset, + project=project, + location=location, + dataset=dataset, ) actual = MigrationServiceClient.dataset_path(project, location, dataset) assert expected == actual @@ -1481,7 +1561,8 @@ def test_dataset_path(): dataset = "clam" expected = "projects/{project}/datasets/{dataset}".format( - project=project, dataset=dataset, + project=project, + dataset=dataset, ) actual = MigrationServiceClient.dataset_path(project, dataset) assert expected == actual @@ -1505,7 +1586,9 @@ def test_dataset_path(): dataset = "cuttlefish" expected = "projects/{project}/locations/{location}/datasets/{dataset}".format( - project=project, location=location, dataset=dataset, + project=project, + location=location, + dataset=dataset, ) actual = MigrationServiceClient.dataset_path(project, location, dataset) assert expected == actual @@ -1530,7 +1613,9 @@ def test_model_path(): model = "squid" expected = "projects/{project}/locations/{location}/models/{model}".format( - project=project, location=location, model=model, + project=project, + location=location, + model=model, ) actual = MigrationServiceClient.model_path(project, location, model) assert expected == actual @@ -1555,7 +1640,9 @@ def test_model_path(): model = "cuttlefish" expected = "projects/{project}/locations/{location}/models/{model}".format( - project=project, location=location, model=model, + project=project, + location=location, + model=model, ) actual = MigrationServiceClient.model_path(project, location, model) assert expected == actual @@ -1580,7 +1667,9 @@ def test_version_path(): version = "squid" expected = "projects/{project}/models/{model}/versions/{version}".format( - project=project, model=model, version=version, + project=project, + model=model, + version=version, ) actual = MigrationServiceClient.version_path(project, model, version) assert expected == actual @@ -1623,7 +1712,9 @@ def test_parse_common_billing_account_path(): def test_common_folder_path(): folder = "cuttlefish" - expected = "folders/{folder}".format(folder=folder,) + expected = "folders/{folder}".format( + folder=folder, + ) actual = MigrationServiceClient.common_folder_path(folder) assert expected == actual @@ -1642,7 +1733,9 @@ def test_parse_common_folder_path(): def test_common_organization_path(): organization = "winkle" - expected = "organizations/{organization}".format(organization=organization,) + expected = "organizations/{organization}".format( + organization=organization, + ) actual = MigrationServiceClient.common_organization_path(organization) assert expected == actual @@ -1661,7 +1754,9 @@ def test_parse_common_organization_path(): def test_common_project_path(): project = "scallop" - expected = "projects/{project}".format(project=project,) + expected = "projects/{project}".format( + project=project, + ) actual = MigrationServiceClient.common_project_path(project) assert expected == 
actual @@ -1682,7 +1777,8 @@ def test_common_location_path(): location = "clam" expected = "projects/{project}/locations/{location}".format( - project=project, location=location, + project=project, + location=location, ) actual = MigrationServiceClient.common_location_path(project, location) assert expected == actual @@ -1707,7 +1803,8 @@ def test_client_withDEFAULT_CLIENT_INFO(): transports.MigrationServiceTransport, "_prep_wrapped_messages" ) as prep: client = MigrationServiceClient( - credentials=credentials.AnonymousCredentials(), client_info=client_info, + credentials=credentials.AnonymousCredentials(), + client_info=client_info, ) prep.assert_called_once_with(client_info) @@ -1716,6 +1813,7 @@ def test_client_withDEFAULT_CLIENT_INFO(): ) as prep: transport_class = MigrationServiceClient.get_transport_class() transport = transport_class( - credentials=credentials.AnonymousCredentials(), client_info=client_info, + credentials=credentials.AnonymousCredentials(), + client_info=client_info, ) prep.assert_called_once_with(client_info) diff --git a/tests/unit/gapic/aiplatform_v1beta1/test_model_service.py b/tests/unit/gapic/aiplatform_v1beta1/test_model_service.py index d05698a46a..97b32a4e78 100644 --- a/tests/unit/gapic/aiplatform_v1beta1/test_model_service.py +++ b/tests/unit/gapic/aiplatform_v1beta1/test_model_service.py @@ -383,7 +383,9 @@ def test_model_service_client_client_options_scopes( client_class, transport_class, transport_name ): # Check the case scopes are provided. - options = client_options.ClientOptions(scopes=["1", "2"],) + options = client_options.ClientOptions( + scopes=["1", "2"], + ) with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None client = client_class(client_options=options) @@ -449,7 +451,8 @@ def test_upload_model( transport: str = "grpc", request_type=model_service.UploadModelRequest ): client = ModelServiceClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=credentials.AnonymousCredentials(), + transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -482,7 +485,8 @@ async def test_upload_model_async( transport: str = "grpc_asyncio", request_type=model_service.UploadModelRequest ): client = ModelServiceAsyncClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=credentials.AnonymousCredentials(), + transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -514,7 +518,9 @@ async def test_upload_model_async_from_dict(): def test_upload_model_field_headers(): - client = ModelServiceClient(credentials=credentials.AnonymousCredentials(),) + client = ModelServiceClient( + credentials=credentials.AnonymousCredentials(), + ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. @@ -534,12 +540,17 @@ def test_upload_model_field_headers(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "parent=parent/value",) in kw["metadata"] + assert ( + "x-goog-request-params", + "parent=parent/value", + ) in kw["metadata"] @pytest.mark.asyncio async def test_upload_model_field_headers_async(): - client = ModelServiceAsyncClient(credentials=credentials.AnonymousCredentials(),) + client = ModelServiceAsyncClient( + credentials=credentials.AnonymousCredentials(), + ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. 
Set these to a non-empty value. @@ -561,11 +572,16 @@ async def test_upload_model_field_headers_async(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "parent=parent/value",) in kw["metadata"] + assert ( + "x-goog-request-params", + "parent=parent/value", + ) in kw["metadata"] def test_upload_model_flattened(): - client = ModelServiceClient(credentials=credentials.AnonymousCredentials(),) + client = ModelServiceClient( + credentials=credentials.AnonymousCredentials(), + ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.upload_model), "__call__") as call: @@ -575,7 +591,8 @@ def test_upload_model_flattened(): # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. client.upload_model( - parent="parent_value", model=gca_model.Model(name="name_value"), + parent="parent_value", + model=gca_model.Model(name="name_value"), ) # Establish that the underlying call was made with the expected @@ -589,7 +606,9 @@ def test_upload_model_flattened(): def test_upload_model_flattened_error(): - client = ModelServiceClient(credentials=credentials.AnonymousCredentials(),) + client = ModelServiceClient( + credentials=credentials.AnonymousCredentials(), + ) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -603,7 +622,9 @@ def test_upload_model_flattened_error(): @pytest.mark.asyncio async def test_upload_model_flattened_async(): - client = ModelServiceAsyncClient(credentials=credentials.AnonymousCredentials(),) + client = ModelServiceAsyncClient( + credentials=credentials.AnonymousCredentials(), + ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.upload_model), "__call__") as call: @@ -616,7 +637,8 @@ async def test_upload_model_flattened_async(): # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. response = await client.upload_model( - parent="parent_value", model=gca_model.Model(name="name_value"), + parent="parent_value", + model=gca_model.Model(name="name_value"), ) # Establish that the underlying call was made with the expected @@ -631,7 +653,9 @@ async def test_upload_model_flattened_async(): @pytest.mark.asyncio async def test_upload_model_flattened_error_async(): - client = ModelServiceAsyncClient(credentials=credentials.AnonymousCredentials(),) + client = ModelServiceAsyncClient( + credentials=credentials.AnonymousCredentials(), + ) # Attempting to call a method with both a request object and flattened # fields is an error. 
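The reflowed header assertions here and throughout the patch are value-identical to the old one-liners: a trailing comma inside a parenthesized tuple is inert, so the exploded multi-line tuple compares equal to the original single-line form. A quick sanity check in plain Python:

    # Both spellings construct the same tuple value.
    assert ("x-goog-request-params", "parent=parent/value",) == (
        "x-goog-request-params",
        "parent=parent/value",
    )

    # The membership test the generated tests run against the call metadata.
    metadata = [("x-goog-request-params", "parent=parent/value")]
    assert (
        "x-goog-request-params",
        "parent=parent/value",
    ) in metadata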
@@ -645,7 +669,8 @@ async def test_upload_model_flattened_error_async(): def test_get_model(transport: str = "grpc", request_type=model_service.GetModelRequest): client = ModelServiceClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=credentials.AnonymousCredentials(), + transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -718,7 +743,8 @@ async def test_get_model_async( transport: str = "grpc_asyncio", request_type=model_service.GetModelRequest ): client = ModelServiceAsyncClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=credentials.AnonymousCredentials(), + transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -793,7 +819,9 @@ async def test_get_model_async_from_dict(): def test_get_model_field_headers(): - client = ModelServiceClient(credentials=credentials.AnonymousCredentials(),) + client = ModelServiceClient( + credentials=credentials.AnonymousCredentials(), + ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. @@ -813,12 +841,17 @@ def test_get_model_field_headers(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "name=name/value",) in kw["metadata"] + assert ( + "x-goog-request-params", + "name=name/value", + ) in kw["metadata"] @pytest.mark.asyncio async def test_get_model_field_headers_async(): - client = ModelServiceAsyncClient(credentials=credentials.AnonymousCredentials(),) + client = ModelServiceAsyncClient( + credentials=credentials.AnonymousCredentials(), + ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. @@ -838,11 +871,16 @@ async def test_get_model_field_headers_async(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "name=name/value",) in kw["metadata"] + assert ( + "x-goog-request-params", + "name=name/value", + ) in kw["metadata"] def test_get_model_flattened(): - client = ModelServiceClient(credentials=credentials.AnonymousCredentials(),) + client = ModelServiceClient( + credentials=credentials.AnonymousCredentials(), + ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.get_model), "__call__") as call: @@ -851,7 +889,9 @@ def test_get_model_flattened(): # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - client.get_model(name="name_value",) + client.get_model( + name="name_value", + ) # Establish that the underlying call was made with the expected # request object values. @@ -862,19 +902,24 @@ def test_get_model_flattened(): def test_get_model_flattened_error(): - client = ModelServiceClient(credentials=credentials.AnonymousCredentials(),) + client = ModelServiceClient( + credentials=credentials.AnonymousCredentials(), + ) # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): client.get_model( - model_service.GetModelRequest(), name="name_value", + model_service.GetModelRequest(), + name="name_value", ) @pytest.mark.asyncio async def test_get_model_flattened_async(): - client = ModelServiceAsyncClient(credentials=credentials.AnonymousCredentials(),) + client = ModelServiceAsyncClient( + credentials=credentials.AnonymousCredentials(), + ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.get_model), "__call__") as call: @@ -884,7 +929,9 @@ async def test_get_model_flattened_async(): call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(model.Model()) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - response = await client.get_model(name="name_value",) + response = await client.get_model( + name="name_value", + ) # Establish that the underlying call was made with the expected # request object values. @@ -896,13 +943,16 @@ async def test_get_model_flattened_async(): @pytest.mark.asyncio async def test_get_model_flattened_error_async(): - client = ModelServiceAsyncClient(credentials=credentials.AnonymousCredentials(),) + client = ModelServiceAsyncClient( + credentials=credentials.AnonymousCredentials(), + ) # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): await client.get_model( - model_service.GetModelRequest(), name="name_value", + model_service.GetModelRequest(), + name="name_value", ) @@ -910,7 +960,8 @@ def test_list_models( transport: str = "grpc", request_type=model_service.ListModelsRequest ): client = ModelServiceClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=credentials.AnonymousCredentials(), + transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -948,7 +999,8 @@ async def test_list_models_async( transport: str = "grpc_asyncio", request_type=model_service.ListModelsRequest ): client = ModelServiceAsyncClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=credentials.AnonymousCredentials(), + transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -959,7 +1011,9 @@ async def test_list_models_async( with mock.patch.object(type(client.transport.list_models), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - model_service.ListModelsResponse(next_page_token="next_page_token_value",) + model_service.ListModelsResponse( + next_page_token="next_page_token_value", + ) ) response = await client.list_models(request) @@ -982,7 +1036,9 @@ async def test_list_models_async_from_dict(): def test_list_models_field_headers(): - client = ModelServiceClient(credentials=credentials.AnonymousCredentials(),) + client = ModelServiceClient( + credentials=credentials.AnonymousCredentials(), + ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. @@ -1002,12 +1058,17 @@ def test_list_models_field_headers(): # Establish that the field header was sent. 
_, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "parent=parent/value",) in kw["metadata"] + assert ( + "x-goog-request-params", + "parent=parent/value", + ) in kw["metadata"] @pytest.mark.asyncio async def test_list_models_field_headers_async(): - client = ModelServiceAsyncClient(credentials=credentials.AnonymousCredentials(),) + client = ModelServiceAsyncClient( + credentials=credentials.AnonymousCredentials(), + ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. @@ -1029,11 +1090,16 @@ async def test_list_models_field_headers_async(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "parent=parent/value",) in kw["metadata"] + assert ( + "x-goog-request-params", + "parent=parent/value", + ) in kw["metadata"] def test_list_models_flattened(): - client = ModelServiceClient(credentials=credentials.AnonymousCredentials(),) + client = ModelServiceClient( + credentials=credentials.AnonymousCredentials(), + ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.list_models), "__call__") as call: @@ -1042,7 +1108,9 @@ def test_list_models_flattened(): # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - client.list_models(parent="parent_value",) + client.list_models( + parent="parent_value", + ) # Establish that the underlying call was made with the expected # request object values. @@ -1053,19 +1121,24 @@ def test_list_models_flattened(): def test_list_models_flattened_error(): - client = ModelServiceClient(credentials=credentials.AnonymousCredentials(),) + client = ModelServiceClient( + credentials=credentials.AnonymousCredentials(), + ) # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): client.list_models( - model_service.ListModelsRequest(), parent="parent_value", + model_service.ListModelsRequest(), + parent="parent_value", ) @pytest.mark.asyncio async def test_list_models_flattened_async(): - client = ModelServiceAsyncClient(credentials=credentials.AnonymousCredentials(),) + client = ModelServiceAsyncClient( + credentials=credentials.AnonymousCredentials(), + ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.list_models), "__call__") as call: @@ -1077,7 +1150,9 @@ async def test_list_models_flattened_async(): ) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - response = await client.list_models(parent="parent_value",) + response = await client.list_models( + parent="parent_value", + ) # Establish that the underlying call was made with the expected # request object values. @@ -1089,32 +1164,52 @@ async def test_list_models_flattened_async(): @pytest.mark.asyncio async def test_list_models_flattened_error_async(): - client = ModelServiceAsyncClient(credentials=credentials.AnonymousCredentials(),) + client = ModelServiceAsyncClient( + credentials=credentials.AnonymousCredentials(), + ) # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): await client.list_models( - model_service.ListModelsRequest(), parent="parent_value", + model_service.ListModelsRequest(), + parent="parent_value", ) def test_list_models_pager(): - client = ModelServiceClient(credentials=credentials.AnonymousCredentials,) + client = ModelServiceClient( + credentials=credentials.AnonymousCredentials, + ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.list_models), "__call__") as call: # Set the response to a series of pages. call.side_effect = ( model_service.ListModelsResponse( - models=[model.Model(), model.Model(), model.Model(),], + models=[ + model.Model(), + model.Model(), + model.Model(), + ], next_page_token="abc", ), - model_service.ListModelsResponse(models=[], next_page_token="def",), model_service.ListModelsResponse( - models=[model.Model(),], next_page_token="ghi", + models=[], + next_page_token="def", + ), + model_service.ListModelsResponse( + models=[ + model.Model(), + ], + next_page_token="ghi", + ), + model_service.ListModelsResponse( + models=[ + model.Model(), + model.Model(), + ], ), - model_service.ListModelsResponse(models=[model.Model(), model.Model(),],), RuntimeError, ) @@ -1132,21 +1227,38 @@ def test_list_models_pager(): def test_list_models_pages(): - client = ModelServiceClient(credentials=credentials.AnonymousCredentials,) + client = ModelServiceClient( + credentials=credentials.AnonymousCredentials, + ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.list_models), "__call__") as call: # Set the response to a series of pages. call.side_effect = ( model_service.ListModelsResponse( - models=[model.Model(), model.Model(), model.Model(),], + models=[ + model.Model(), + model.Model(), + model.Model(), + ], next_page_token="abc", ), - model_service.ListModelsResponse(models=[], next_page_token="def",), model_service.ListModelsResponse( - models=[model.Model(),], next_page_token="ghi", + models=[], + next_page_token="def", + ), + model_service.ListModelsResponse( + models=[ + model.Model(), + ], + next_page_token="ghi", + ), + model_service.ListModelsResponse( + models=[ + model.Model(), + model.Model(), + ], ), - model_service.ListModelsResponse(models=[model.Model(), model.Model(),],), RuntimeError, ) pages = list(client.list_models(request={}).pages) @@ -1156,7 +1268,9 @@ def test_list_models_pages(): @pytest.mark.asyncio async def test_list_models_async_pager(): - client = ModelServiceAsyncClient(credentials=credentials.AnonymousCredentials,) + client = ModelServiceAsyncClient( + credentials=credentials.AnonymousCredentials, + ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -1165,17 +1279,34 @@ async def test_list_models_async_pager(): # Set the response to a series of pages. 
call.side_effect = ( model_service.ListModelsResponse( - models=[model.Model(), model.Model(), model.Model(),], + models=[ + model.Model(), + model.Model(), + model.Model(), + ], next_page_token="abc", ), - model_service.ListModelsResponse(models=[], next_page_token="def",), model_service.ListModelsResponse( - models=[model.Model(),], next_page_token="ghi", + models=[], + next_page_token="def", + ), + model_service.ListModelsResponse( + models=[ + model.Model(), + ], + next_page_token="ghi", + ), + model_service.ListModelsResponse( + models=[ + model.Model(), + model.Model(), + ], ), - model_service.ListModelsResponse(models=[model.Model(), model.Model(),],), RuntimeError, ) - async_pager = await client.list_models(request={},) + async_pager = await client.list_models( + request={}, + ) assert async_pager.next_page_token == "abc" responses = [] async for response in async_pager: @@ -1187,7 +1318,9 @@ async def test_list_models_async_pager(): @pytest.mark.asyncio async def test_list_models_async_pages(): - client = ModelServiceAsyncClient(credentials=credentials.AnonymousCredentials,) + client = ModelServiceAsyncClient( + credentials=credentials.AnonymousCredentials, + ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -1196,14 +1329,29 @@ async def test_list_models_async_pages(): # Set the response to a series of pages. call.side_effect = ( model_service.ListModelsResponse( - models=[model.Model(), model.Model(), model.Model(),], + models=[ + model.Model(), + model.Model(), + model.Model(), + ], next_page_token="abc", ), - model_service.ListModelsResponse(models=[], next_page_token="def",), model_service.ListModelsResponse( - models=[model.Model(),], next_page_token="ghi", + models=[], + next_page_token="def", + ), + model_service.ListModelsResponse( + models=[ + model.Model(), + ], + next_page_token="ghi", + ), + model_service.ListModelsResponse( + models=[ + model.Model(), + model.Model(), + ], ), - model_service.ListModelsResponse(models=[model.Model(), model.Model(),],), RuntimeError, ) pages = [] @@ -1217,7 +1365,8 @@ def test_update_model( transport: str = "grpc", request_type=model_service.UpdateModelRequest ): client = ModelServiceClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=credentials.AnonymousCredentials(), + transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -1290,7 +1439,8 @@ async def test_update_model_async( transport: str = "grpc_asyncio", request_type=model_service.UpdateModelRequest ): client = ModelServiceAsyncClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=credentials.AnonymousCredentials(), + transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -1365,7 +1515,9 @@ async def test_update_model_async_from_dict(): def test_update_model_field_headers(): - client = ModelServiceClient(credentials=credentials.AnonymousCredentials(),) + client = ModelServiceClient( + credentials=credentials.AnonymousCredentials(), + ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. @@ -1385,12 +1537,17 @@ def test_update_model_field_headers(): # Establish that the field header was sent. 
_, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "model.name=model.name/value",) in kw["metadata"] + assert ( + "x-goog-request-params", + "model.name=model.name/value", + ) in kw["metadata"] @pytest.mark.asyncio async def test_update_model_field_headers_async(): - client = ModelServiceAsyncClient(credentials=credentials.AnonymousCredentials(),) + client = ModelServiceAsyncClient( + credentials=credentials.AnonymousCredentials(), + ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. @@ -1410,11 +1567,16 @@ async def test_update_model_field_headers_async(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "model.name=model.name/value",) in kw["metadata"] + assert ( + "x-goog-request-params", + "model.name=model.name/value", + ) in kw["metadata"] def test_update_model_flattened(): - client = ModelServiceClient(credentials=credentials.AnonymousCredentials(),) + client = ModelServiceClient( + credentials=credentials.AnonymousCredentials(), + ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.update_model), "__call__") as call: @@ -1439,7 +1601,9 @@ def test_update_model_flattened(): def test_update_model_flattened_error(): - client = ModelServiceClient(credentials=credentials.AnonymousCredentials(),) + client = ModelServiceClient( + credentials=credentials.AnonymousCredentials(), + ) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -1453,7 +1617,9 @@ def test_update_model_flattened_error(): @pytest.mark.asyncio async def test_update_model_flattened_async(): - client = ModelServiceAsyncClient(credentials=credentials.AnonymousCredentials(),) + client = ModelServiceAsyncClient( + credentials=credentials.AnonymousCredentials(), + ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.update_model), "__call__") as call: @@ -1480,7 +1646,9 @@ async def test_update_model_flattened_async(): @pytest.mark.asyncio async def test_update_model_flattened_error_async(): - client = ModelServiceAsyncClient(credentials=credentials.AnonymousCredentials(),) + client = ModelServiceAsyncClient( + credentials=credentials.AnonymousCredentials(), + ) # Attempting to call a method with both a request object and flattened # fields is an error. 
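The mechanical change repeated through every hunk in these test files is black's "magic trailing comma": any call, tuple, or collection whose source already ends in a trailing comma is exploded to one element per line, even when it would otherwise fit on a single line. A minimal sketch of reproducing that rewrite programmatically, assuming black is installed (format_str and FileMode are black's public API, though exact output can drift between releases, so pin the release the patch was generated with for byte-identical results):

# Sketch: reproduce the "magic trailing comma" rewrite seen in these hunks.
# Assumes `pip install black`; format_str/FileMode are public black API, but
# output can vary across black releases.
import black

src = 'client = ModelServiceClient(credentials=credentials.AnonymousCredentials(),)\n'

formatted = black.format_str(src, mode=black.FileMode())
print(formatted)
# The trailing comma forces a multi-line layout:
#   client = ModelServiceClient(
#       credentials=credentials.AnonymousCredentials(),
#   )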
@@ -1496,7 +1664,8 @@ def test_delete_model( transport: str = "grpc", request_type=model_service.DeleteModelRequest ): client = ModelServiceClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=credentials.AnonymousCredentials(), + transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -1529,7 +1698,8 @@ async def test_delete_model_async( transport: str = "grpc_asyncio", request_type=model_service.DeleteModelRequest ): client = ModelServiceAsyncClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=credentials.AnonymousCredentials(), + transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -1561,7 +1731,9 @@ async def test_delete_model_async_from_dict(): def test_delete_model_field_headers(): - client = ModelServiceClient(credentials=credentials.AnonymousCredentials(),) + client = ModelServiceClient( + credentials=credentials.AnonymousCredentials(), + ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. @@ -1581,12 +1753,17 @@ def test_delete_model_field_headers(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "name=name/value",) in kw["metadata"] + assert ( + "x-goog-request-params", + "name=name/value", + ) in kw["metadata"] @pytest.mark.asyncio async def test_delete_model_field_headers_async(): - client = ModelServiceAsyncClient(credentials=credentials.AnonymousCredentials(),) + client = ModelServiceAsyncClient( + credentials=credentials.AnonymousCredentials(), + ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. @@ -1608,11 +1785,16 @@ async def test_delete_model_field_headers_async(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "name=name/value",) in kw["metadata"] + assert ( + "x-goog-request-params", + "name=name/value", + ) in kw["metadata"] def test_delete_model_flattened(): - client = ModelServiceClient(credentials=credentials.AnonymousCredentials(),) + client = ModelServiceClient( + credentials=credentials.AnonymousCredentials(), + ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.delete_model), "__call__") as call: @@ -1621,7 +1803,9 @@ def test_delete_model_flattened(): # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - client.delete_model(name="name_value",) + client.delete_model( + name="name_value", + ) # Establish that the underlying call was made with the expected # request object values. @@ -1632,19 +1816,24 @@ def test_delete_model_flattened(): def test_delete_model_flattened_error(): - client = ModelServiceClient(credentials=credentials.AnonymousCredentials(),) + client = ModelServiceClient( + credentials=credentials.AnonymousCredentials(), + ) # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): client.delete_model( - model_service.DeleteModelRequest(), name="name_value", + model_service.DeleteModelRequest(), + name="name_value", ) @pytest.mark.asyncio async def test_delete_model_flattened_async(): - client = ModelServiceAsyncClient(credentials=credentials.AnonymousCredentials(),) + client = ModelServiceAsyncClient( + credentials=credentials.AnonymousCredentials(), + ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.delete_model), "__call__") as call: @@ -1656,7 +1845,9 @@ async def test_delete_model_flattened_async(): ) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - response = await client.delete_model(name="name_value",) + response = await client.delete_model( + name="name_value", + ) # Establish that the underlying call was made with the expected # request object values. @@ -1668,13 +1859,16 @@ async def test_delete_model_flattened_async(): @pytest.mark.asyncio async def test_delete_model_flattened_error_async(): - client = ModelServiceAsyncClient(credentials=credentials.AnonymousCredentials(),) + client = ModelServiceAsyncClient( + credentials=credentials.AnonymousCredentials(), + ) # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): await client.delete_model( - model_service.DeleteModelRequest(), name="name_value", + model_service.DeleteModelRequest(), + name="name_value", ) @@ -1682,7 +1876,8 @@ def test_export_model( transport: str = "grpc", request_type=model_service.ExportModelRequest ): client = ModelServiceClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=credentials.AnonymousCredentials(), + transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -1715,7 +1910,8 @@ async def test_export_model_async( transport: str = "grpc_asyncio", request_type=model_service.ExportModelRequest ): client = ModelServiceAsyncClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=credentials.AnonymousCredentials(), + transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -1747,7 +1943,9 @@ async def test_export_model_async_from_dict(): def test_export_model_field_headers(): - client = ModelServiceClient(credentials=credentials.AnonymousCredentials(),) + client = ModelServiceClient( + credentials=credentials.AnonymousCredentials(), + ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. @@ -1767,12 +1965,17 @@ def test_export_model_field_headers(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "name=name/value",) in kw["metadata"] + assert ( + "x-goog-request-params", + "name=name/value", + ) in kw["metadata"] @pytest.mark.asyncio async def test_export_model_field_headers_async(): - client = ModelServiceAsyncClient(credentials=credentials.AnonymousCredentials(),) + client = ModelServiceAsyncClient( + credentials=credentials.AnonymousCredentials(), + ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. @@ -1794,11 +1997,16 @@ async def test_export_model_field_headers_async(): # Establish that the field header was sent. 
_, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "name=name/value",) in kw["metadata"] + assert ( + "x-goog-request-params", + "name=name/value", + ) in kw["metadata"] def test_export_model_flattened(): - client = ModelServiceClient(credentials=credentials.AnonymousCredentials(),) + client = ModelServiceClient( + credentials=credentials.AnonymousCredentials(), + ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.export_model), "__call__") as call: @@ -1827,7 +2035,9 @@ def test_export_model_flattened(): def test_export_model_flattened_error(): - client = ModelServiceClient(credentials=credentials.AnonymousCredentials(),) + client = ModelServiceClient( + credentials=credentials.AnonymousCredentials(), + ) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -1843,7 +2053,9 @@ def test_export_model_flattened_error(): @pytest.mark.asyncio async def test_export_model_flattened_async(): - client = ModelServiceAsyncClient(credentials=credentials.AnonymousCredentials(),) + client = ModelServiceAsyncClient( + credentials=credentials.AnonymousCredentials(), + ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.export_model), "__call__") as call: @@ -1876,7 +2088,9 @@ async def test_export_model_flattened_async(): @pytest.mark.asyncio async def test_export_model_flattened_error_async(): - client = ModelServiceAsyncClient(credentials=credentials.AnonymousCredentials(),) + client = ModelServiceAsyncClient( + credentials=credentials.AnonymousCredentials(), + ) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -1894,7 +2108,8 @@ def test_get_model_evaluation( transport: str = "grpc", request_type=model_service.GetModelEvaluationRequest ): client = ModelServiceClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=credentials.AnonymousCredentials(), + transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -1941,7 +2156,8 @@ async def test_get_model_evaluation_async( request_type=model_service.GetModelEvaluationRequest, ): client = ModelServiceAsyncClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=credentials.AnonymousCredentials(), + transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -1985,7 +2201,9 @@ async def test_get_model_evaluation_async_from_dict(): def test_get_model_evaluation_field_headers(): - client = ModelServiceClient(credentials=credentials.AnonymousCredentials(),) + client = ModelServiceClient( + credentials=credentials.AnonymousCredentials(), + ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. @@ -2007,12 +2225,17 @@ def test_get_model_evaluation_field_headers(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "name=name/value",) in kw["metadata"] + assert ( + "x-goog-request-params", + "name=name/value", + ) in kw["metadata"] @pytest.mark.asyncio async def test_get_model_evaluation_field_headers_async(): - client = ModelServiceAsyncClient(credentials=credentials.AnonymousCredentials(),) + client = ModelServiceAsyncClient( + credentials=credentials.AnonymousCredentials(), + ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. 
Set these to a non-empty value. @@ -2036,11 +2259,16 @@ async def test_get_model_evaluation_field_headers_async(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "name=name/value",) in kw["metadata"] + assert ( + "x-goog-request-params", + "name=name/value", + ) in kw["metadata"] def test_get_model_evaluation_flattened(): - client = ModelServiceClient(credentials=credentials.AnonymousCredentials(),) + client = ModelServiceClient( + credentials=credentials.AnonymousCredentials(), + ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -2051,7 +2279,9 @@ def test_get_model_evaluation_flattened(): # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - client.get_model_evaluation(name="name_value",) + client.get_model_evaluation( + name="name_value", + ) # Establish that the underlying call was made with the expected # request object values. @@ -2062,19 +2292,24 @@ def test_get_model_evaluation_flattened(): def test_get_model_evaluation_flattened_error(): - client = ModelServiceClient(credentials=credentials.AnonymousCredentials(),) + client = ModelServiceClient( + credentials=credentials.AnonymousCredentials(), + ) # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): client.get_model_evaluation( - model_service.GetModelEvaluationRequest(), name="name_value", + model_service.GetModelEvaluationRequest(), + name="name_value", ) @pytest.mark.asyncio async def test_get_model_evaluation_flattened_async(): - client = ModelServiceAsyncClient(credentials=credentials.AnonymousCredentials(),) + client = ModelServiceAsyncClient( + credentials=credentials.AnonymousCredentials(), + ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -2088,7 +2323,9 @@ async def test_get_model_evaluation_flattened_async(): ) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - response = await client.get_model_evaluation(name="name_value",) + response = await client.get_model_evaluation( + name="name_value", + ) # Establish that the underlying call was made with the expected # request object values. @@ -2100,13 +2337,16 @@ async def test_get_model_evaluation_flattened_async(): @pytest.mark.asyncio async def test_get_model_evaluation_flattened_error_async(): - client = ModelServiceAsyncClient(credentials=credentials.AnonymousCredentials(),) + client = ModelServiceAsyncClient( + credentials=credentials.AnonymousCredentials(), + ) # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): await client.get_model_evaluation( - model_service.GetModelEvaluationRequest(), name="name_value", + model_service.GetModelEvaluationRequest(), + name="name_value", ) @@ -2114,7 +2354,8 @@ def test_list_model_evaluations( transport: str = "grpc", request_type=model_service.ListModelEvaluationsRequest ): client = ModelServiceClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=credentials.AnonymousCredentials(), + transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -2155,7 +2396,8 @@ async def test_list_model_evaluations_async( request_type=model_service.ListModelEvaluationsRequest, ): client = ModelServiceAsyncClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=credentials.AnonymousCredentials(), + transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -2193,7 +2435,9 @@ async def test_list_model_evaluations_async_from_dict(): def test_list_model_evaluations_field_headers(): - client = ModelServiceClient(credentials=credentials.AnonymousCredentials(),) + client = ModelServiceClient( + credentials=credentials.AnonymousCredentials(), + ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. @@ -2215,12 +2459,17 @@ def test_list_model_evaluations_field_headers(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "parent=parent/value",) in kw["metadata"] + assert ( + "x-goog-request-params", + "parent=parent/value", + ) in kw["metadata"] @pytest.mark.asyncio async def test_list_model_evaluations_field_headers_async(): - client = ModelServiceAsyncClient(credentials=credentials.AnonymousCredentials(),) + client = ModelServiceAsyncClient( + credentials=credentials.AnonymousCredentials(), + ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. @@ -2244,11 +2493,16 @@ async def test_list_model_evaluations_field_headers_async(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "parent=parent/value",) in kw["metadata"] + assert ( + "x-goog-request-params", + "parent=parent/value", + ) in kw["metadata"] def test_list_model_evaluations_flattened(): - client = ModelServiceClient(credentials=credentials.AnonymousCredentials(),) + client = ModelServiceClient( + credentials=credentials.AnonymousCredentials(), + ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -2259,7 +2513,9 @@ def test_list_model_evaluations_flattened(): # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - client.list_model_evaluations(parent="parent_value",) + client.list_model_evaluations( + parent="parent_value", + ) # Establish that the underlying call was made with the expected # request object values. @@ -2270,19 +2526,24 @@ def test_list_model_evaluations_flattened(): def test_list_model_evaluations_flattened_error(): - client = ModelServiceClient(credentials=credentials.AnonymousCredentials(),) + client = ModelServiceClient( + credentials=credentials.AnonymousCredentials(), + ) # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): client.list_model_evaluations( - model_service.ListModelEvaluationsRequest(), parent="parent_value", + model_service.ListModelEvaluationsRequest(), + parent="parent_value", ) @pytest.mark.asyncio async def test_list_model_evaluations_flattened_async(): - client = ModelServiceAsyncClient(credentials=credentials.AnonymousCredentials(),) + client = ModelServiceAsyncClient( + credentials=credentials.AnonymousCredentials(), + ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -2296,7 +2557,9 @@ async def test_list_model_evaluations_flattened_async(): ) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - response = await client.list_model_evaluations(parent="parent_value",) + response = await client.list_model_evaluations( + parent="parent_value", + ) # Establish that the underlying call was made with the expected # request object values. @@ -2308,18 +2571,23 @@ async def test_list_model_evaluations_flattened_async(): @pytest.mark.asyncio async def test_list_model_evaluations_flattened_error_async(): - client = ModelServiceAsyncClient(credentials=credentials.AnonymousCredentials(),) + client = ModelServiceAsyncClient( + credentials=credentials.AnonymousCredentials(), + ) # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): await client.list_model_evaluations( - model_service.ListModelEvaluationsRequest(), parent="parent_value", + model_service.ListModelEvaluationsRequest(), + parent="parent_value", ) def test_list_model_evaluations_pager(): - client = ModelServiceClient(credentials=credentials.AnonymousCredentials,) + client = ModelServiceClient( + credentials=credentials.AnonymousCredentials, + ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -2336,10 +2604,13 @@ def test_list_model_evaluations_pager(): next_page_token="abc", ), model_service.ListModelEvaluationsResponse( - model_evaluations=[], next_page_token="def", + model_evaluations=[], + next_page_token="def", ), model_service.ListModelEvaluationsResponse( - model_evaluations=[model_evaluation.ModelEvaluation(),], + model_evaluations=[ + model_evaluation.ModelEvaluation(), + ], next_page_token="ghi", ), model_service.ListModelEvaluationsResponse( @@ -2365,7 +2636,9 @@ def test_list_model_evaluations_pager(): def test_list_model_evaluations_pages(): - client = ModelServiceClient(credentials=credentials.AnonymousCredentials,) + client = ModelServiceClient( + credentials=credentials.AnonymousCredentials, + ) # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object( @@ -2382,10 +2655,13 @@ def test_list_model_evaluations_pages(): next_page_token="abc", ), model_service.ListModelEvaluationsResponse( - model_evaluations=[], next_page_token="def", + model_evaluations=[], + next_page_token="def", ), model_service.ListModelEvaluationsResponse( - model_evaluations=[model_evaluation.ModelEvaluation(),], + model_evaluations=[ + model_evaluation.ModelEvaluation(), + ], next_page_token="ghi", ), model_service.ListModelEvaluationsResponse( @@ -2403,7 +2679,9 @@ def test_list_model_evaluations_pages(): @pytest.mark.asyncio async def test_list_model_evaluations_async_pager(): - client = ModelServiceAsyncClient(credentials=credentials.AnonymousCredentials,) + client = ModelServiceAsyncClient( + credentials=credentials.AnonymousCredentials, + ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -2422,10 +2700,13 @@ async def test_list_model_evaluations_async_pager(): next_page_token="abc", ), model_service.ListModelEvaluationsResponse( - model_evaluations=[], next_page_token="def", + model_evaluations=[], + next_page_token="def", ), model_service.ListModelEvaluationsResponse( - model_evaluations=[model_evaluation.ModelEvaluation(),], + model_evaluations=[ + model_evaluation.ModelEvaluation(), + ], next_page_token="ghi", ), model_service.ListModelEvaluationsResponse( @@ -2436,7 +2717,9 @@ async def test_list_model_evaluations_async_pager(): ), RuntimeError, ) - async_pager = await client.list_model_evaluations(request={},) + async_pager = await client.list_model_evaluations( + request={}, + ) assert async_pager.next_page_token == "abc" responses = [] async for response in async_pager: @@ -2448,7 +2731,9 @@ async def test_list_model_evaluations_async_pager(): @pytest.mark.asyncio async def test_list_model_evaluations_async_pages(): - client = ModelServiceAsyncClient(credentials=credentials.AnonymousCredentials,) + client = ModelServiceAsyncClient( + credentials=credentials.AnonymousCredentials, + ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -2467,10 +2752,13 @@ async def test_list_model_evaluations_async_pages(): next_page_token="abc", ), model_service.ListModelEvaluationsResponse( - model_evaluations=[], next_page_token="def", + model_evaluations=[], + next_page_token="def", ), model_service.ListModelEvaluationsResponse( - model_evaluations=[model_evaluation.ModelEvaluation(),], + model_evaluations=[ + model_evaluation.ModelEvaluation(), + ], next_page_token="ghi", ), model_service.ListModelEvaluationsResponse( @@ -2492,7 +2780,8 @@ def test_get_model_evaluation_slice( transport: str = "grpc", request_type=model_service.GetModelEvaluationSliceRequest ): client = ModelServiceClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=credentials.AnonymousCredentials(), + transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -2505,7 +2794,8 @@ def test_get_model_evaluation_slice( ) as call: # Designate an appropriate return value for the call. 
call.return_value = model_evaluation_slice.ModelEvaluationSlice( - name="name_value", metrics_schema_uri="metrics_schema_uri_value", + name="name_value", + metrics_schema_uri="metrics_schema_uri_value", ) response = client.get_model_evaluation_slice(request) @@ -2535,7 +2825,8 @@ async def test_get_model_evaluation_slice_async( request_type=model_service.GetModelEvaluationSliceRequest, ): client = ModelServiceAsyncClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=credentials.AnonymousCredentials(), + transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -2549,7 +2840,8 @@ async def test_get_model_evaluation_slice_async( # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( model_evaluation_slice.ModelEvaluationSlice( - name="name_value", metrics_schema_uri="metrics_schema_uri_value", + name="name_value", + metrics_schema_uri="metrics_schema_uri_value", ) ) @@ -2575,7 +2867,9 @@ async def test_get_model_evaluation_slice_async_from_dict(): def test_get_model_evaluation_slice_field_headers(): - client = ModelServiceClient(credentials=credentials.AnonymousCredentials(),) + client = ModelServiceClient( + credentials=credentials.AnonymousCredentials(), + ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. @@ -2597,12 +2891,17 @@ def test_get_model_evaluation_slice_field_headers(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "name=name/value",) in kw["metadata"] + assert ( + "x-goog-request-params", + "name=name/value", + ) in kw["metadata"] @pytest.mark.asyncio async def test_get_model_evaluation_slice_field_headers_async(): - client = ModelServiceAsyncClient(credentials=credentials.AnonymousCredentials(),) + client = ModelServiceAsyncClient( + credentials=credentials.AnonymousCredentials(), + ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. @@ -2626,11 +2925,16 @@ async def test_get_model_evaluation_slice_field_headers_async(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "name=name/value",) in kw["metadata"] + assert ( + "x-goog-request-params", + "name=name/value", + ) in kw["metadata"] def test_get_model_evaluation_slice_flattened(): - client = ModelServiceClient(credentials=credentials.AnonymousCredentials(),) + client = ModelServiceClient( + credentials=credentials.AnonymousCredentials(), + ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -2641,7 +2945,9 @@ def test_get_model_evaluation_slice_flattened(): # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - client.get_model_evaluation_slice(name="name_value",) + client.get_model_evaluation_slice( + name="name_value", + ) # Establish that the underlying call was made with the expected # request object values. @@ -2652,19 +2958,24 @@ def test_get_model_evaluation_slice_flattened(): def test_get_model_evaluation_slice_flattened_error(): - client = ModelServiceClient(credentials=credentials.AnonymousCredentials(),) + client = ModelServiceClient( + credentials=credentials.AnonymousCredentials(), + ) # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): client.get_model_evaluation_slice( - model_service.GetModelEvaluationSliceRequest(), name="name_value", + model_service.GetModelEvaluationSliceRequest(), + name="name_value", ) @pytest.mark.asyncio async def test_get_model_evaluation_slice_flattened_async(): - client = ModelServiceAsyncClient(credentials=credentials.AnonymousCredentials(),) + client = ModelServiceAsyncClient( + credentials=credentials.AnonymousCredentials(), + ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -2678,7 +2989,9 @@ async def test_get_model_evaluation_slice_flattened_async(): ) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - response = await client.get_model_evaluation_slice(name="name_value",) + response = await client.get_model_evaluation_slice( + name="name_value", + ) # Establish that the underlying call was made with the expected # request object values. @@ -2690,13 +3003,16 @@ async def test_get_model_evaluation_slice_flattened_async(): @pytest.mark.asyncio async def test_get_model_evaluation_slice_flattened_error_async(): - client = ModelServiceAsyncClient(credentials=credentials.AnonymousCredentials(),) + client = ModelServiceAsyncClient( + credentials=credentials.AnonymousCredentials(), + ) # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): await client.get_model_evaluation_slice( - model_service.GetModelEvaluationSliceRequest(), name="name_value", + model_service.GetModelEvaluationSliceRequest(), + name="name_value", ) @@ -2704,7 +3020,8 @@ def test_list_model_evaluation_slices( transport: str = "grpc", request_type=model_service.ListModelEvaluationSlicesRequest ): client = ModelServiceClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=credentials.AnonymousCredentials(), + transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -2745,7 +3062,8 @@ async def test_list_model_evaluation_slices_async( request_type=model_service.ListModelEvaluationSlicesRequest, ): client = ModelServiceAsyncClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=credentials.AnonymousCredentials(), + transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -2783,7 +3101,9 @@ async def test_list_model_evaluation_slices_async_from_dict(): def test_list_model_evaluation_slices_field_headers(): - client = ModelServiceClient(credentials=credentials.AnonymousCredentials(),) + client = ModelServiceClient( + credentials=credentials.AnonymousCredentials(), + ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. @@ -2805,12 +3125,17 @@ def test_list_model_evaluation_slices_field_headers(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "parent=parent/value",) in kw["metadata"] + assert ( + "x-goog-request-params", + "parent=parent/value", + ) in kw["metadata"] @pytest.mark.asyncio async def test_list_model_evaluation_slices_field_headers_async(): - client = ModelServiceAsyncClient(credentials=credentials.AnonymousCredentials(),) + client = ModelServiceAsyncClient( + credentials=credentials.AnonymousCredentials(), + ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. 
@@ -2834,11 +3159,16 @@ async def test_list_model_evaluation_slices_field_headers_async(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "parent=parent/value",) in kw["metadata"] + assert ( + "x-goog-request-params", + "parent=parent/value", + ) in kw["metadata"] def test_list_model_evaluation_slices_flattened(): - client = ModelServiceClient(credentials=credentials.AnonymousCredentials(),) + client = ModelServiceClient( + credentials=credentials.AnonymousCredentials(), + ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -2849,7 +3179,9 @@ def test_list_model_evaluation_slices_flattened(): # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - client.list_model_evaluation_slices(parent="parent_value",) + client.list_model_evaluation_slices( + parent="parent_value", + ) # Establish that the underlying call was made with the expected # request object values. @@ -2860,19 +3192,24 @@ def test_list_model_evaluation_slices_flattened(): def test_list_model_evaluation_slices_flattened_error(): - client = ModelServiceClient(credentials=credentials.AnonymousCredentials(),) + client = ModelServiceClient( + credentials=credentials.AnonymousCredentials(), + ) # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): client.list_model_evaluation_slices( - model_service.ListModelEvaluationSlicesRequest(), parent="parent_value", + model_service.ListModelEvaluationSlicesRequest(), + parent="parent_value", ) @pytest.mark.asyncio async def test_list_model_evaluation_slices_flattened_async(): - client = ModelServiceAsyncClient(credentials=credentials.AnonymousCredentials(),) + client = ModelServiceAsyncClient( + credentials=credentials.AnonymousCredentials(), + ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -2886,7 +3223,9 @@ async def test_list_model_evaluation_slices_flattened_async(): ) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - response = await client.list_model_evaluation_slices(parent="parent_value",) + response = await client.list_model_evaluation_slices( + parent="parent_value", + ) # Establish that the underlying call was made with the expected # request object values. @@ -2898,18 +3237,23 @@ async def test_list_model_evaluation_slices_flattened_async(): @pytest.mark.asyncio async def test_list_model_evaluation_slices_flattened_error_async(): - client = ModelServiceAsyncClient(credentials=credentials.AnonymousCredentials(),) + client = ModelServiceAsyncClient( + credentials=credentials.AnonymousCredentials(), + ) # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): await client.list_model_evaluation_slices( - model_service.ListModelEvaluationSlicesRequest(), parent="parent_value", + model_service.ListModelEvaluationSlicesRequest(), + parent="parent_value", ) def test_list_model_evaluation_slices_pager(): - client = ModelServiceClient(credentials=credentials.AnonymousCredentials,) + client = ModelServiceClient( + credentials=credentials.AnonymousCredentials, + ) # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object( @@ -2926,7 +3270,8 @@ def test_list_model_evaluation_slices_pager(): next_page_token="abc", ), model_service.ListModelEvaluationSlicesResponse( - model_evaluation_slices=[], next_page_token="def", + model_evaluation_slices=[], + next_page_token="def", ), model_service.ListModelEvaluationSlicesResponse( model_evaluation_slices=[ @@ -2959,7 +3304,9 @@ def test_list_model_evaluation_slices_pager(): def test_list_model_evaluation_slices_pages(): - client = ModelServiceClient(credentials=credentials.AnonymousCredentials,) + client = ModelServiceClient( + credentials=credentials.AnonymousCredentials, + ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -2976,7 +3323,8 @@ def test_list_model_evaluation_slices_pages(): next_page_token="abc", ), model_service.ListModelEvaluationSlicesResponse( - model_evaluation_slices=[], next_page_token="def", + model_evaluation_slices=[], + next_page_token="def", ), model_service.ListModelEvaluationSlicesResponse( model_evaluation_slices=[ @@ -2999,7 +3347,9 @@ def test_list_model_evaluation_slices_pages(): @pytest.mark.asyncio async def test_list_model_evaluation_slices_async_pager(): - client = ModelServiceAsyncClient(credentials=credentials.AnonymousCredentials,) + client = ModelServiceAsyncClient( + credentials=credentials.AnonymousCredentials, + ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -3018,7 +3368,8 @@ async def test_list_model_evaluation_slices_async_pager(): next_page_token="abc", ), model_service.ListModelEvaluationSlicesResponse( - model_evaluation_slices=[], next_page_token="def", + model_evaluation_slices=[], + next_page_token="def", ), model_service.ListModelEvaluationSlicesResponse( model_evaluation_slices=[ @@ -3034,7 +3385,9 @@ async def test_list_model_evaluation_slices_async_pager(): ), RuntimeError, ) - async_pager = await client.list_model_evaluation_slices(request={},) + async_pager = await client.list_model_evaluation_slices( + request={}, + ) assert async_pager.next_page_token == "abc" responses = [] async for response in async_pager: @@ -3049,7 +3402,9 @@ async def test_list_model_evaluation_slices_async_pager(): @pytest.mark.asyncio async def test_list_model_evaluation_slices_async_pages(): - client = ModelServiceAsyncClient(credentials=credentials.AnonymousCredentials,) + client = ModelServiceAsyncClient( + credentials=credentials.AnonymousCredentials, + ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -3068,7 +3423,8 @@ async def test_list_model_evaluation_slices_async_pages(): next_page_token="abc", ), model_service.ListModelEvaluationSlicesResponse( - model_evaluation_slices=[], next_page_token="def", + model_evaluation_slices=[], + next_page_token="def", ), model_service.ListModelEvaluationSlicesResponse( model_evaluation_slices=[ @@ -3100,7 +3456,8 @@ def test_credentials_transport_error(): ) with pytest.raises(ValueError): client = ModelServiceClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=credentials.AnonymousCredentials(), + transport=transport, ) # It is an error to provide a credentials file and a transport instance. 
@@ -3119,7 +3476,8 @@ def test_credentials_transport_error(): ) with pytest.raises(ValueError): client = ModelServiceClient( - client_options={"scopes": ["1", "2"]}, transport=transport, + client_options={"scopes": ["1", "2"]}, + transport=transport, ) @@ -3161,8 +3519,13 @@ def test_transport_adc(transport_class): def test_transport_grpc_default(): # A client should use the gRPC transport by default. - client = ModelServiceClient(credentials=credentials.AnonymousCredentials(),) - assert isinstance(client.transport, transports.ModelServiceGrpcTransport,) + client = ModelServiceClient( + credentials=credentials.AnonymousCredentials(), + ) + assert isinstance( + client.transport, + transports.ModelServiceGrpcTransport, + ) def test_model_service_base_transport_error(): @@ -3218,7 +3581,8 @@ def test_model_service_base_transport_with_credentials_file(): Transport.return_value = None load_creds.return_value = (credentials.AnonymousCredentials(), None) transport = transports.ModelServiceTransport( - credentials_file="credentials.json", quota_project_id="octopus", + credentials_file="credentials.json", + quota_project_id="octopus", ) load_creds.assert_called_once_with( "credentials.json", @@ -3288,7 +3652,8 @@ def test_model_service_grpc_transport_channel(): # Check that channel is used if provided. transport = transports.ModelServiceGrpcTransport( - host="squid.clam.whelk", channel=channel, + host="squid.clam.whelk", + channel=channel, ) assert transport.grpc_channel == channel assert transport._host == "squid.clam.whelk:443" @@ -3300,7 +3665,8 @@ def test_model_service_grpc_asyncio_transport_channel(): # Check that channel is used if provided. transport = transports.ModelServiceGrpcAsyncIOTransport( - host="squid.clam.whelk", channel=channel, + host="squid.clam.whelk", + channel=channel, ) assert transport.grpc_channel == channel assert transport._host == "squid.clam.whelk:443" @@ -3389,12 +3755,16 @@ def test_model_service_transport_channel_mtls_with_adc(transport_class): def test_model_service_grpc_lro_client(): client = ModelServiceClient( - credentials=credentials.AnonymousCredentials(), transport="grpc", + credentials=credentials.AnonymousCredentials(), + transport="grpc", ) transport = client.transport # Ensure that we have a api-core operations client. - assert isinstance(transport.operations_client, operations_v1.OperationsClient,) + assert isinstance( + transport.operations_client, + operations_v1.OperationsClient, + ) # Ensure that subsequent calls to the property send the exact same object. assert transport.operations_client is transport.operations_client @@ -3402,12 +3772,16 @@ def test_model_service_grpc_lro_client(): def test_model_service_grpc_lro_async_client(): client = ModelServiceAsyncClient( - credentials=credentials.AnonymousCredentials(), transport="grpc_asyncio", + credentials=credentials.AnonymousCredentials(), + transport="grpc_asyncio", ) transport = client.transport # Ensure that we have a api-core operations client. - assert isinstance(transport.operations_client, operations_v1.OperationsAsyncClient,) + assert isinstance( + transport.operations_client, + operations_v1.OperationsAsyncClient, + ) # Ensure that subsequent calls to the property send the exact same object. 
assert transport.operations_client is transport.operations_client @@ -3419,7 +3793,9 @@ def test_endpoint_path(): endpoint = "whelk" expected = "projects/{project}/locations/{location}/endpoints/{endpoint}".format( - project=project, location=location, endpoint=endpoint, + project=project, + location=location, + endpoint=endpoint, ) actual = ModelServiceClient.endpoint_path(project, location, endpoint) assert expected == actual @@ -3444,7 +3820,9 @@ def test_model_path(): model = "winkle" expected = "projects/{project}/locations/{location}/models/{model}".format( - project=project, location=location, model=model, + project=project, + location=location, + model=model, ) actual = ModelServiceClient.model_path(project, location, model) assert expected == actual @@ -3470,7 +3848,10 @@ def test_model_evaluation_path(): evaluation = "octopus" expected = "projects/{project}/locations/{location}/models/{model}/evaluations/{evaluation}".format( - project=project, location=location, model=model, evaluation=evaluation, + project=project, + location=location, + model=model, + evaluation=evaluation, ) actual = ModelServiceClient.model_evaluation_path( project, location, model, evaluation @@ -3533,7 +3914,9 @@ def test_training_pipeline_path(): training_pipeline = "winkle" expected = "projects/{project}/locations/{location}/trainingPipelines/{training_pipeline}".format( - project=project, location=location, training_pipeline=training_pipeline, + project=project, + location=location, + training_pipeline=training_pipeline, ) actual = ModelServiceClient.training_pipeline_path( project, location, training_pipeline @@ -3578,7 +3961,9 @@ def test_parse_common_billing_account_path(): def test_common_folder_path(): folder = "whelk" - expected = "folders/{folder}".format(folder=folder,) + expected = "folders/{folder}".format( + folder=folder, + ) actual = ModelServiceClient.common_folder_path(folder) assert expected == actual @@ -3597,7 +3982,9 @@ def test_parse_common_folder_path(): def test_common_organization_path(): organization = "oyster" - expected = "organizations/{organization}".format(organization=organization,) + expected = "organizations/{organization}".format( + organization=organization, + ) actual = ModelServiceClient.common_organization_path(organization) assert expected == actual @@ -3616,7 +4003,9 @@ def test_parse_common_organization_path(): def test_common_project_path(): project = "cuttlefish" - expected = "projects/{project}".format(project=project,) + expected = "projects/{project}".format( + project=project, + ) actual = ModelServiceClient.common_project_path(project) assert expected == actual @@ -3637,7 +4026,8 @@ def test_common_location_path(): location = "nautilus" expected = "projects/{project}/locations/{location}".format( - project=project, location=location, + project=project, + location=location, ) actual = ModelServiceClient.common_location_path(project, location) assert expected == actual @@ -3662,7 +4052,8 @@ def test_client_withDEFAULT_CLIENT_INFO(): transports.ModelServiceTransport, "_prep_wrapped_messages" ) as prep: client = ModelServiceClient( - credentials=credentials.AnonymousCredentials(), client_info=client_info, + credentials=credentials.AnonymousCredentials(), + client_info=client_info, ) prep.assert_called_once_with(client_info) @@ -3671,6 +4062,7 @@ def test_client_withDEFAULT_CLIENT_INFO(): ) as prep: transport_class = ModelServiceClient.get_transport_class() transport = transport_class( - credentials=credentials.AnonymousCredentials(), client_info=client_info, + 
credentials=credentials.AnonymousCredentials(), + client_info=client_info, ) prep.assert_called_once_with(client_info) diff --git a/tests/unit/gapic/aiplatform_v1beta1/test_pipeline_service.py b/tests/unit/gapic/aiplatform_v1beta1/test_pipeline_service.py index ada82b91c0..97e4132173 100644 --- a/tests/unit/gapic/aiplatform_v1beta1/test_pipeline_service.py +++ b/tests/unit/gapic/aiplatform_v1beta1/test_pipeline_service.py @@ -408,7 +408,9 @@ def test_pipeline_service_client_client_options_scopes( client_class, transport_class, transport_name ): # Check the case scopes are provided. - options = client_options.ClientOptions(scopes=["1", "2"],) + options = client_options.ClientOptions( + scopes=["1", "2"], + ) with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None client = client_class(client_options=options) @@ -476,7 +478,8 @@ def test_create_training_pipeline( transport: str = "grpc", request_type=pipeline_service.CreateTrainingPipelineRequest ): client = PipelineServiceClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=credentials.AnonymousCredentials(), + transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -526,7 +529,8 @@ async def test_create_training_pipeline_async( request_type=pipeline_service.CreateTrainingPipelineRequest, ): client = PipelineServiceAsyncClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=credentials.AnonymousCredentials(), + transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -573,7 +577,9 @@ async def test_create_training_pipeline_async_from_dict(): def test_create_training_pipeline_field_headers(): - client = PipelineServiceClient(credentials=credentials.AnonymousCredentials(),) + client = PipelineServiceClient( + credentials=credentials.AnonymousCredentials(), + ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. @@ -595,12 +601,17 @@ def test_create_training_pipeline_field_headers(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "parent=parent/value",) in kw["metadata"] + assert ( + "x-goog-request-params", + "parent=parent/value", + ) in kw["metadata"] @pytest.mark.asyncio async def test_create_training_pipeline_field_headers_async(): - client = PipelineServiceAsyncClient(credentials=credentials.AnonymousCredentials(),) + client = PipelineServiceAsyncClient( + credentials=credentials.AnonymousCredentials(), + ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. @@ -624,11 +635,16 @@ async def test_create_training_pipeline_field_headers_async(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "parent=parent/value",) in kw["metadata"] + assert ( + "x-goog-request-params", + "parent=parent/value", + ) in kw["metadata"] def test_create_training_pipeline_flattened(): - client = PipelineServiceClient(credentials=credentials.AnonymousCredentials(),) + client = PipelineServiceClient( + credentials=credentials.AnonymousCredentials(), + ) # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object( @@ -657,7 +673,9 @@ def test_create_training_pipeline_flattened(): def test_create_training_pipeline_flattened_error(): - client = PipelineServiceClient(credentials=credentials.AnonymousCredentials(),) + client = PipelineServiceClient( + credentials=credentials.AnonymousCredentials(), + ) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -671,7 +689,9 @@ def test_create_training_pipeline_flattened_error(): @pytest.mark.asyncio async def test_create_training_pipeline_flattened_async(): - client = PipelineServiceAsyncClient(credentials=credentials.AnonymousCredentials(),) + client = PipelineServiceAsyncClient( + credentials=credentials.AnonymousCredentials(), + ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -704,7 +724,9 @@ async def test_create_training_pipeline_flattened_async(): @pytest.mark.asyncio async def test_create_training_pipeline_flattened_error_async(): - client = PipelineServiceAsyncClient(credentials=credentials.AnonymousCredentials(),) + client = PipelineServiceAsyncClient( + credentials=credentials.AnonymousCredentials(), + ) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -720,7 +742,8 @@ def test_get_training_pipeline( transport: str = "grpc", request_type=pipeline_service.GetTrainingPipelineRequest ): client = PipelineServiceClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=credentials.AnonymousCredentials(), + transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -770,7 +793,8 @@ async def test_get_training_pipeline_async( request_type=pipeline_service.GetTrainingPipelineRequest, ): client = PipelineServiceAsyncClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=credentials.AnonymousCredentials(), + transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -817,7 +841,9 @@ async def test_get_training_pipeline_async_from_dict(): def test_get_training_pipeline_field_headers(): - client = PipelineServiceClient(credentials=credentials.AnonymousCredentials(),) + client = PipelineServiceClient( + credentials=credentials.AnonymousCredentials(), + ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. @@ -839,12 +865,17 @@ def test_get_training_pipeline_field_headers(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "name=name/value",) in kw["metadata"] + assert ( + "x-goog-request-params", + "name=name/value", + ) in kw["metadata"] @pytest.mark.asyncio async def test_get_training_pipeline_field_headers_async(): - client = PipelineServiceAsyncClient(credentials=credentials.AnonymousCredentials(),) + client = PipelineServiceAsyncClient( + credentials=credentials.AnonymousCredentials(), + ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. @@ -868,11 +899,16 @@ async def test_get_training_pipeline_field_headers_async(): # Establish that the field header was sent. 
_, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "name=name/value",) in kw["metadata"] + assert ( + "x-goog-request-params", + "name=name/value", + ) in kw["metadata"] def test_get_training_pipeline_flattened(): - client = PipelineServiceClient(credentials=credentials.AnonymousCredentials(),) + client = PipelineServiceClient( + credentials=credentials.AnonymousCredentials(), + ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -883,7 +919,9 @@ def test_get_training_pipeline_flattened(): # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - client.get_training_pipeline(name="name_value",) + client.get_training_pipeline( + name="name_value", + ) # Establish that the underlying call was made with the expected # request object values. @@ -894,19 +932,24 @@ def test_get_training_pipeline_flattened(): def test_get_training_pipeline_flattened_error(): - client = PipelineServiceClient(credentials=credentials.AnonymousCredentials(),) + client = PipelineServiceClient( + credentials=credentials.AnonymousCredentials(), + ) # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): client.get_training_pipeline( - pipeline_service.GetTrainingPipelineRequest(), name="name_value", + pipeline_service.GetTrainingPipelineRequest(), + name="name_value", ) @pytest.mark.asyncio async def test_get_training_pipeline_flattened_async(): - client = PipelineServiceAsyncClient(credentials=credentials.AnonymousCredentials(),) + client = PipelineServiceAsyncClient( + credentials=credentials.AnonymousCredentials(), + ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -920,7 +963,9 @@ async def test_get_training_pipeline_flattened_async(): ) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - response = await client.get_training_pipeline(name="name_value",) + response = await client.get_training_pipeline( + name="name_value", + ) # Establish that the underlying call was made with the expected # request object values. @@ -932,13 +977,16 @@ async def test_get_training_pipeline_flattened_async(): @pytest.mark.asyncio async def test_get_training_pipeline_flattened_error_async(): - client = PipelineServiceAsyncClient(credentials=credentials.AnonymousCredentials(),) + client = PipelineServiceAsyncClient( + credentials=credentials.AnonymousCredentials(), + ) # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): await client.get_training_pipeline( - pipeline_service.GetTrainingPipelineRequest(), name="name_value", + pipeline_service.GetTrainingPipelineRequest(), + name="name_value", ) @@ -946,7 +994,8 @@ def test_list_training_pipelines( transport: str = "grpc", request_type=pipeline_service.ListTrainingPipelinesRequest ): client = PipelineServiceClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=credentials.AnonymousCredentials(), + transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -987,7 +1036,8 @@ async def test_list_training_pipelines_async( request_type=pipeline_service.ListTrainingPipelinesRequest, ): client = PipelineServiceAsyncClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=credentials.AnonymousCredentials(), + transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -1025,7 +1075,9 @@ async def test_list_training_pipelines_async_from_dict(): def test_list_training_pipelines_field_headers(): - client = PipelineServiceClient(credentials=credentials.AnonymousCredentials(),) + client = PipelineServiceClient( + credentials=credentials.AnonymousCredentials(), + ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. @@ -1047,12 +1099,17 @@ def test_list_training_pipelines_field_headers(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "parent=parent/value",) in kw["metadata"] + assert ( + "x-goog-request-params", + "parent=parent/value", + ) in kw["metadata"] @pytest.mark.asyncio async def test_list_training_pipelines_field_headers_async(): - client = PipelineServiceAsyncClient(credentials=credentials.AnonymousCredentials(),) + client = PipelineServiceAsyncClient( + credentials=credentials.AnonymousCredentials(), + ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. @@ -1076,11 +1133,16 @@ async def test_list_training_pipelines_field_headers_async(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "parent=parent/value",) in kw["metadata"] + assert ( + "x-goog-request-params", + "parent=parent/value", + ) in kw["metadata"] def test_list_training_pipelines_flattened(): - client = PipelineServiceClient(credentials=credentials.AnonymousCredentials(),) + client = PipelineServiceClient( + credentials=credentials.AnonymousCredentials(), + ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -1091,7 +1153,9 @@ def test_list_training_pipelines_flattened(): # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - client.list_training_pipelines(parent="parent_value",) + client.list_training_pipelines( + parent="parent_value", + ) # Establish that the underlying call was made with the expected # request object values. @@ -1102,19 +1166,24 @@ def test_list_training_pipelines_flattened(): def test_list_training_pipelines_flattened_error(): - client = PipelineServiceClient(credentials=credentials.AnonymousCredentials(),) + client = PipelineServiceClient( + credentials=credentials.AnonymousCredentials(), + ) # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): client.list_training_pipelines( - pipeline_service.ListTrainingPipelinesRequest(), parent="parent_value", + pipeline_service.ListTrainingPipelinesRequest(), + parent="parent_value", ) @pytest.mark.asyncio async def test_list_training_pipelines_flattened_async(): - client = PipelineServiceAsyncClient(credentials=credentials.AnonymousCredentials(),) + client = PipelineServiceAsyncClient( + credentials=credentials.AnonymousCredentials(), + ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -1128,7 +1197,9 @@ async def test_list_training_pipelines_flattened_async(): ) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - response = await client.list_training_pipelines(parent="parent_value",) + response = await client.list_training_pipelines( + parent="parent_value", + ) # Establish that the underlying call was made with the expected # request object values. @@ -1140,18 +1211,23 @@ async def test_list_training_pipelines_flattened_async(): @pytest.mark.asyncio async def test_list_training_pipelines_flattened_error_async(): - client = PipelineServiceAsyncClient(credentials=credentials.AnonymousCredentials(),) + client = PipelineServiceAsyncClient( + credentials=credentials.AnonymousCredentials(), + ) # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): await client.list_training_pipelines( - pipeline_service.ListTrainingPipelinesRequest(), parent="parent_value", + pipeline_service.ListTrainingPipelinesRequest(), + parent="parent_value", ) def test_list_training_pipelines_pager(): - client = PipelineServiceClient(credentials=credentials.AnonymousCredentials,) + client = PipelineServiceClient( + credentials=credentials.AnonymousCredentials, + ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -1168,10 +1244,13 @@ def test_list_training_pipelines_pager(): next_page_token="abc", ), pipeline_service.ListTrainingPipelinesResponse( - training_pipelines=[], next_page_token="def", + training_pipelines=[], + next_page_token="def", ), pipeline_service.ListTrainingPipelinesResponse( - training_pipelines=[training_pipeline.TrainingPipeline(),], + training_pipelines=[ + training_pipeline.TrainingPipeline(), + ], next_page_token="ghi", ), pipeline_service.ListTrainingPipelinesResponse( @@ -1197,7 +1276,9 @@ def test_list_training_pipelines_pager(): def test_list_training_pipelines_pages(): - client = PipelineServiceClient(credentials=credentials.AnonymousCredentials,) + client = PipelineServiceClient( + credentials=credentials.AnonymousCredentials, + ) # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object( @@ -1214,10 +1295,13 @@ def test_list_training_pipelines_pages(): next_page_token="abc", ), pipeline_service.ListTrainingPipelinesResponse( - training_pipelines=[], next_page_token="def", + training_pipelines=[], + next_page_token="def", ), pipeline_service.ListTrainingPipelinesResponse( - training_pipelines=[training_pipeline.TrainingPipeline(),], + training_pipelines=[ + training_pipeline.TrainingPipeline(), + ], next_page_token="ghi", ), pipeline_service.ListTrainingPipelinesResponse( @@ -1235,7 +1319,9 @@ def test_list_training_pipelines_pages(): @pytest.mark.asyncio async def test_list_training_pipelines_async_pager(): - client = PipelineServiceAsyncClient(credentials=credentials.AnonymousCredentials,) + client = PipelineServiceAsyncClient( + credentials=credentials.AnonymousCredentials, + ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -1254,10 +1340,13 @@ async def test_list_training_pipelines_async_pager(): next_page_token="abc", ), pipeline_service.ListTrainingPipelinesResponse( - training_pipelines=[], next_page_token="def", + training_pipelines=[], + next_page_token="def", ), pipeline_service.ListTrainingPipelinesResponse( - training_pipelines=[training_pipeline.TrainingPipeline(),], + training_pipelines=[ + training_pipeline.TrainingPipeline(), + ], next_page_token="ghi", ), pipeline_service.ListTrainingPipelinesResponse( @@ -1268,7 +1357,9 @@ async def test_list_training_pipelines_async_pager(): ), RuntimeError, ) - async_pager = await client.list_training_pipelines(request={},) + async_pager = await client.list_training_pipelines( + request={}, + ) assert async_pager.next_page_token == "abc" responses = [] async for response in async_pager: @@ -1280,7 +1371,9 @@ async def test_list_training_pipelines_async_pager(): @pytest.mark.asyncio async def test_list_training_pipelines_async_pages(): - client = PipelineServiceAsyncClient(credentials=credentials.AnonymousCredentials,) + client = PipelineServiceAsyncClient( + credentials=credentials.AnonymousCredentials, + ) # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object( @@ -1299,10 +1392,13 @@ async def test_list_training_pipelines_async_pages(): next_page_token="abc", ), pipeline_service.ListTrainingPipelinesResponse( - training_pipelines=[], next_page_token="def", + training_pipelines=[], + next_page_token="def", ), pipeline_service.ListTrainingPipelinesResponse( - training_pipelines=[training_pipeline.TrainingPipeline(),], + training_pipelines=[ + training_pipeline.TrainingPipeline(), + ], next_page_token="ghi", ), pipeline_service.ListTrainingPipelinesResponse( @@ -1324,7 +1420,8 @@ def test_delete_training_pipeline( transport: str = "grpc", request_type=pipeline_service.DeleteTrainingPipelineRequest ): client = PipelineServiceClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=credentials.AnonymousCredentials(), + transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -1360,7 +1457,8 @@ async def test_delete_training_pipeline_async( request_type=pipeline_service.DeleteTrainingPipelineRequest, ): client = PipelineServiceAsyncClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=credentials.AnonymousCredentials(), + transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -1394,7 +1492,9 @@ async def test_delete_training_pipeline_async_from_dict(): def test_delete_training_pipeline_field_headers(): - client = PipelineServiceClient(credentials=credentials.AnonymousCredentials(),) + client = PipelineServiceClient( + credentials=credentials.AnonymousCredentials(), + ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. @@ -1416,12 +1516,17 @@ def test_delete_training_pipeline_field_headers(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "name=name/value",) in kw["metadata"] + assert ( + "x-goog-request-params", + "name=name/value", + ) in kw["metadata"] @pytest.mark.asyncio async def test_delete_training_pipeline_field_headers_async(): - client = PipelineServiceAsyncClient(credentials=credentials.AnonymousCredentials(),) + client = PipelineServiceAsyncClient( + credentials=credentials.AnonymousCredentials(), + ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. @@ -1445,11 +1550,16 @@ async def test_delete_training_pipeline_field_headers_async(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "name=name/value",) in kw["metadata"] + assert ( + "x-goog-request-params", + "name=name/value", + ) in kw["metadata"] def test_delete_training_pipeline_flattened(): - client = PipelineServiceClient(credentials=credentials.AnonymousCredentials(),) + client = PipelineServiceClient( + credentials=credentials.AnonymousCredentials(), + ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -1460,7 +1570,9 @@ def test_delete_training_pipeline_flattened(): # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - client.delete_training_pipeline(name="name_value",) + client.delete_training_pipeline( + name="name_value", + ) # Establish that the underlying call was made with the expected # request object values. 
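The flattened-call hunks above exercise the two calling conventions every generated method supports. A minimal sketch of both, assuming the test module's imports; in the tests these calls run against a mocked gRPC stub, and the resource name here is illustrative:

# Flattened style: keyword arguments; the client builds the request proto.
client = PipelineServiceClient(credentials=credentials.AnonymousCredentials())
client.get_training_pipeline(
    name="projects/p/locations/us-central1/trainingPipelines/123",
)

# Request-object style: the caller builds the request proto explicitly.
request = pipeline_service.GetTrainingPipelineRequest(
    name="projects/p/locations/us-central1/trainingPipelines/123",
)
client.get_training_pipeline(request=request)

# Passing a request object and flattened fields together raises ValueError,
# which is exactly what the *_flattened_error tests above assert.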
@@ -1471,19 +1583,24 @@ def test_delete_training_pipeline_flattened(): def test_delete_training_pipeline_flattened_error(): - client = PipelineServiceClient(credentials=credentials.AnonymousCredentials(),) + client = PipelineServiceClient( + credentials=credentials.AnonymousCredentials(), + ) # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): client.delete_training_pipeline( - pipeline_service.DeleteTrainingPipelineRequest(), name="name_value", + pipeline_service.DeleteTrainingPipelineRequest(), + name="name_value", ) @pytest.mark.asyncio async def test_delete_training_pipeline_flattened_async(): - client = PipelineServiceAsyncClient(credentials=credentials.AnonymousCredentials(),) + client = PipelineServiceAsyncClient( + credentials=credentials.AnonymousCredentials(), + ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -1497,7 +1614,9 @@ async def test_delete_training_pipeline_flattened_async(): ) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - response = await client.delete_training_pipeline(name="name_value",) + response = await client.delete_training_pipeline( + name="name_value", + ) # Establish that the underlying call was made with the expected # request object values. @@ -1509,13 +1628,16 @@ async def test_delete_training_pipeline_flattened_async(): @pytest.mark.asyncio async def test_delete_training_pipeline_flattened_error_async(): - client = PipelineServiceAsyncClient(credentials=credentials.AnonymousCredentials(),) + client = PipelineServiceAsyncClient( + credentials=credentials.AnonymousCredentials(), + ) # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): await client.delete_training_pipeline( - pipeline_service.DeleteTrainingPipelineRequest(), name="name_value", + pipeline_service.DeleteTrainingPipelineRequest(), + name="name_value", ) @@ -1523,7 +1645,8 @@ def test_cancel_training_pipeline( transport: str = "grpc", request_type=pipeline_service.CancelTrainingPipelineRequest ): client = PipelineServiceClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=credentials.AnonymousCredentials(), + transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -1559,7 +1682,8 @@ async def test_cancel_training_pipeline_async( request_type=pipeline_service.CancelTrainingPipelineRequest, ): client = PipelineServiceAsyncClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=credentials.AnonymousCredentials(), + transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -1591,7 +1715,9 @@ async def test_cancel_training_pipeline_async_from_dict(): def test_cancel_training_pipeline_field_headers(): - client = PipelineServiceClient(credentials=credentials.AnonymousCredentials(),) + client = PipelineServiceClient( + credentials=credentials.AnonymousCredentials(), + ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. @@ -1613,12 +1739,17 @@ def test_cancel_training_pipeline_field_headers(): # Establish that the field header was sent. 
_, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "name=name/value",) in kw["metadata"] + assert ( + "x-goog-request-params", + "name=name/value", + ) in kw["metadata"] @pytest.mark.asyncio async def test_cancel_training_pipeline_field_headers_async(): - client = PipelineServiceAsyncClient(credentials=credentials.AnonymousCredentials(),) + client = PipelineServiceAsyncClient( + credentials=credentials.AnonymousCredentials(), + ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. @@ -1640,11 +1771,16 @@ async def test_cancel_training_pipeline_field_headers_async(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "name=name/value",) in kw["metadata"] + assert ( + "x-goog-request-params", + "name=name/value", + ) in kw["metadata"] def test_cancel_training_pipeline_flattened(): - client = PipelineServiceClient(credentials=credentials.AnonymousCredentials(),) + client = PipelineServiceClient( + credentials=credentials.AnonymousCredentials(), + ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -1655,7 +1791,9 @@ def test_cancel_training_pipeline_flattened(): # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - client.cancel_training_pipeline(name="name_value",) + client.cancel_training_pipeline( + name="name_value", + ) # Establish that the underlying call was made with the expected # request object values. @@ -1666,19 +1804,24 @@ def test_cancel_training_pipeline_flattened(): def test_cancel_training_pipeline_flattened_error(): - client = PipelineServiceClient(credentials=credentials.AnonymousCredentials(),) + client = PipelineServiceClient( + credentials=credentials.AnonymousCredentials(), + ) # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): client.cancel_training_pipeline( - pipeline_service.CancelTrainingPipelineRequest(), name="name_value", + pipeline_service.CancelTrainingPipelineRequest(), + name="name_value", ) @pytest.mark.asyncio async def test_cancel_training_pipeline_flattened_async(): - client = PipelineServiceAsyncClient(credentials=credentials.AnonymousCredentials(),) + client = PipelineServiceAsyncClient( + credentials=credentials.AnonymousCredentials(), + ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -1690,7 +1833,9 @@ async def test_cancel_training_pipeline_flattened_async(): call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - response = await client.cancel_training_pipeline(name="name_value",) + response = await client.cancel_training_pipeline( + name="name_value", + ) # Establish that the underlying call was made with the expected # request object values. @@ -1702,13 +1847,16 @@ async def test_cancel_training_pipeline_flattened_async(): @pytest.mark.asyncio async def test_cancel_training_pipeline_flattened_error_async(): - client = PipelineServiceAsyncClient(credentials=credentials.AnonymousCredentials(),) + client = PipelineServiceAsyncClient( + credentials=credentials.AnonymousCredentials(), + ) # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): await client.cancel_training_pipeline( - pipeline_service.CancelTrainingPipelineRequest(), name="name_value", + pipeline_service.CancelTrainingPipelineRequest(), + name="name_value", ) @@ -1719,7 +1867,8 @@ def test_credentials_transport_error(): ) with pytest.raises(ValueError): client = PipelineServiceClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=credentials.AnonymousCredentials(), + transport=transport, ) # It is an error to provide a credentials file and a transport instance. @@ -1738,7 +1887,8 @@ def test_credentials_transport_error(): ) with pytest.raises(ValueError): client = PipelineServiceClient( - client_options={"scopes": ["1", "2"]}, transport=transport, + client_options={"scopes": ["1", "2"]}, + transport=transport, ) @@ -1783,8 +1933,13 @@ def test_transport_adc(transport_class): def test_transport_grpc_default(): # A client should use the gRPC transport by default. - client = PipelineServiceClient(credentials=credentials.AnonymousCredentials(),) - assert isinstance(client.transport, transports.PipelineServiceGrpcTransport,) + client = PipelineServiceClient( + credentials=credentials.AnonymousCredentials(), + ) + assert isinstance( + client.transport, + transports.PipelineServiceGrpcTransport, + ) def test_pipeline_service_base_transport_error(): @@ -1835,7 +1990,8 @@ def test_pipeline_service_base_transport_with_credentials_file(): Transport.return_value = None load_creds.return_value = (credentials.AnonymousCredentials(), None) transport = transports.PipelineServiceTransport( - credentials_file="credentials.json", quota_project_id="octopus", + credentials_file="credentials.json", + quota_project_id="octopus", ) load_creds.assert_called_once_with( "credentials.json", @@ -1905,7 +2061,8 @@ def test_pipeline_service_grpc_transport_channel(): # Check that channel is used if provided. transport = transports.PipelineServiceGrpcTransport( - host="squid.clam.whelk", channel=channel, + host="squid.clam.whelk", + channel=channel, ) assert transport.grpc_channel == channel assert transport._host == "squid.clam.whelk:443" @@ -1917,7 +2074,8 @@ def test_pipeline_service_grpc_asyncio_transport_channel(): # Check that channel is used if provided. transport = transports.PipelineServiceGrpcAsyncIOTransport( - host="squid.clam.whelk", channel=channel, + host="squid.clam.whelk", + channel=channel, ) assert transport.grpc_channel == channel assert transport._host == "squid.clam.whelk:443" @@ -2014,12 +2172,16 @@ def test_pipeline_service_transport_channel_mtls_with_adc(transport_class): def test_pipeline_service_grpc_lro_client(): client = PipelineServiceClient( - credentials=credentials.AnonymousCredentials(), transport="grpc", + credentials=credentials.AnonymousCredentials(), + transport="grpc", ) transport = client.transport # Ensure that we have a api-core operations client. - assert isinstance(transport.operations_client, operations_v1.OperationsClient,) + assert isinstance( + transport.operations_client, + operations_v1.OperationsClient, + ) # Ensure that subsequent calls to the property send the exact same object. 
assert transport.operations_client is transport.operations_client @@ -2027,12 +2189,16 @@ def test_pipeline_service_grpc_lro_client(): def test_pipeline_service_grpc_lro_async_client(): client = PipelineServiceAsyncClient( - credentials=credentials.AnonymousCredentials(), transport="grpc_asyncio", + credentials=credentials.AnonymousCredentials(), + transport="grpc_asyncio", ) transport = client.transport # Ensure that we have a api-core operations client. - assert isinstance(transport.operations_client, operations_v1.OperationsAsyncClient,) + assert isinstance( + transport.operations_client, + operations_v1.OperationsAsyncClient, + ) # Ensure that subsequent calls to the property send the exact same object. assert transport.operations_client is transport.operations_client @@ -2044,7 +2210,9 @@ def test_endpoint_path(): endpoint = "whelk" expected = "projects/{project}/locations/{location}/endpoints/{endpoint}".format( - project=project, location=location, endpoint=endpoint, + project=project, + location=location, + endpoint=endpoint, ) actual = PipelineServiceClient.endpoint_path(project, location, endpoint) assert expected == actual @@ -2069,7 +2237,9 @@ def test_model_path(): model = "winkle" expected = "projects/{project}/locations/{location}/models/{model}".format( - project=project, location=location, model=model, + project=project, + location=location, + model=model, ) actual = PipelineServiceClient.model_path(project, location, model) assert expected == actual @@ -2094,7 +2264,9 @@ def test_training_pipeline_path(): training_pipeline = "whelk" expected = "projects/{project}/locations/{location}/trainingPipelines/{training_pipeline}".format( - project=project, location=location, training_pipeline=training_pipeline, + project=project, + location=location, + training_pipeline=training_pipeline, ) actual = PipelineServiceClient.training_pipeline_path( project, location, training_pipeline @@ -2139,7 +2311,9 @@ def test_parse_common_billing_account_path(): def test_common_folder_path(): folder = "winkle" - expected = "folders/{folder}".format(folder=folder,) + expected = "folders/{folder}".format( + folder=folder, + ) actual = PipelineServiceClient.common_folder_path(folder) assert expected == actual @@ -2158,7 +2332,9 @@ def test_parse_common_folder_path(): def test_common_organization_path(): organization = "scallop" - expected = "organizations/{organization}".format(organization=organization,) + expected = "organizations/{organization}".format( + organization=organization, + ) actual = PipelineServiceClient.common_organization_path(organization) assert expected == actual @@ -2177,7 +2353,9 @@ def test_parse_common_organization_path(): def test_common_project_path(): project = "squid" - expected = "projects/{project}".format(project=project,) + expected = "projects/{project}".format( + project=project, + ) actual = PipelineServiceClient.common_project_path(project) assert expected == actual @@ -2198,7 +2376,8 @@ def test_common_location_path(): location = "octopus" expected = "projects/{project}/locations/{location}".format( - project=project, location=location, + project=project, + location=location, ) actual = PipelineServiceClient.common_location_path(project, location) assert expected == actual @@ -2223,7 +2402,8 @@ def test_client_withDEFAULT_CLIENT_INFO(): transports.PipelineServiceTransport, "_prep_wrapped_messages" ) as prep: client = PipelineServiceClient( - credentials=credentials.AnonymousCredentials(), client_info=client_info, + 
credentials=credentials.AnonymousCredentials(), + client_info=client_info, ) prep.assert_called_once_with(client_info) @@ -2232,6 +2412,7 @@ def test_client_withDEFAULT_CLIENT_INFO(): ) as prep: transport_class = PipelineServiceClient.get_transport_class() transport = transport_class( - credentials=credentials.AnonymousCredentials(), client_info=client_info, + credentials=credentials.AnonymousCredentials(), + client_info=client_info, ) prep.assert_called_once_with(client_info) diff --git a/tests/unit/gapic/aiplatform_v1beta1/test_prediction_service.py b/tests/unit/gapic/aiplatform_v1beta1/test_prediction_service.py index e47e0f62c5..6c9f551aa2 100644 --- a/tests/unit/gapic/aiplatform_v1beta1/test_prediction_service.py +++ b/tests/unit/gapic/aiplatform_v1beta1/test_prediction_service.py @@ -389,7 +389,9 @@ def test_prediction_service_client_client_options_scopes( client_class, transport_class, transport_name ): # Check the case scopes are provided. - options = client_options.ClientOptions(scopes=["1", "2"],) + options = client_options.ClientOptions( + scopes=["1", "2"], + ) with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None client = client_class(client_options=options) @@ -457,7 +459,8 @@ def test_predict( transport: str = "grpc", request_type=prediction_service.PredictRequest ): client = PredictionServiceClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=credentials.AnonymousCredentials(), + transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -495,7 +498,8 @@ async def test_predict_async( transport: str = "grpc_asyncio", request_type=prediction_service.PredictRequest ): client = PredictionServiceAsyncClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=credentials.AnonymousCredentials(), + transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -531,7 +535,9 @@ async def test_predict_async_from_dict(): def test_predict_field_headers(): - client = PredictionServiceClient(credentials=credentials.AnonymousCredentials(),) + client = PredictionServiceClient( + credentials=credentials.AnonymousCredentials(), + ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. @@ -551,7 +557,10 @@ def test_predict_field_headers(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "endpoint=endpoint/value",) in kw["metadata"] + assert ( + "x-goog-request-params", + "endpoint=endpoint/value", + ) in kw["metadata"] @pytest.mark.asyncio @@ -580,11 +589,16 @@ async def test_predict_field_headers_async(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "endpoint=endpoint/value",) in kw["metadata"] + assert ( + "x-goog-request-params", + "endpoint=endpoint/value", + ) in kw["metadata"] def test_predict_flattened(): - client = PredictionServiceClient(credentials=credentials.AnonymousCredentials(),) + client = PredictionServiceClient( + credentials=credentials.AnonymousCredentials(), + ) # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object(type(client.transport.predict), "__call__") as call: @@ -617,7 +631,9 @@ def test_predict_flattened(): def test_predict_flattened_error(): - client = PredictionServiceClient(credentials=credentials.AnonymousCredentials(),) + client = PredictionServiceClient( + credentials=credentials.AnonymousCredentials(), + ) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -690,7 +706,8 @@ def test_explain( transport: str = "grpc", request_type=prediction_service.ExplainRequest ): client = PredictionServiceClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=credentials.AnonymousCredentials(), + transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -728,7 +745,8 @@ async def test_explain_async( transport: str = "grpc_asyncio", request_type=prediction_service.ExplainRequest ): client = PredictionServiceAsyncClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=credentials.AnonymousCredentials(), + transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -764,7 +782,9 @@ async def test_explain_async_from_dict(): def test_explain_field_headers(): - client = PredictionServiceClient(credentials=credentials.AnonymousCredentials(),) + client = PredictionServiceClient( + credentials=credentials.AnonymousCredentials(), + ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. @@ -784,7 +804,10 @@ def test_explain_field_headers(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "endpoint=endpoint/value",) in kw["metadata"] + assert ( + "x-goog-request-params", + "endpoint=endpoint/value", + ) in kw["metadata"] @pytest.mark.asyncio @@ -813,11 +836,16 @@ async def test_explain_field_headers_async(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "endpoint=endpoint/value",) in kw["metadata"] + assert ( + "x-goog-request-params", + "endpoint=endpoint/value", + ) in kw["metadata"] def test_explain_flattened(): - client = PredictionServiceClient(credentials=credentials.AnonymousCredentials(),) + client = PredictionServiceClient( + credentials=credentials.AnonymousCredentials(), + ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.explain), "__call__") as call: @@ -853,7 +881,9 @@ def test_explain_flattened(): def test_explain_flattened_error(): - client = PredictionServiceClient(credentials=credentials.AnonymousCredentials(),) + client = PredictionServiceClient( + credentials=credentials.AnonymousCredentials(), + ) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -934,7 +964,8 @@ def test_credentials_transport_error(): ) with pytest.raises(ValueError): client = PredictionServiceClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=credentials.AnonymousCredentials(), + transport=transport, ) # It is an error to provide a credentials file and a transport instance. 
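The comment closing the hunk above states the rule that the following hunks keep asserting. A minimal sketch of the credentials-file case, assuming the test module's imports (the filename is illustrative):

# A transport instance already carries credentials, so combining it with
# a credentials file (or other credential options) raises ValueError.
transport = transports.PredictionServiceGrpcTransport(
    credentials=credentials.AnonymousCredentials(),
)
with pytest.raises(ValueError):
    PredictionServiceClient(
        client_options={"credentials_file": "creds.json"},
        transport=transport,
    )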
@@ -953,7 +984,8 @@ def test_credentials_transport_error(): ) with pytest.raises(ValueError): client = PredictionServiceClient( - client_options={"scopes": ["1", "2"]}, transport=transport, + client_options={"scopes": ["1", "2"]}, + transport=transport, ) @@ -998,8 +1030,13 @@ def test_transport_adc(transport_class): def test_transport_grpc_default(): # A client should use the gRPC transport by default. - client = PredictionServiceClient(credentials=credentials.AnonymousCredentials(),) - assert isinstance(client.transport, transports.PredictionServiceGrpcTransport,) + client = PredictionServiceClient( + credentials=credentials.AnonymousCredentials(), + ) + assert isinstance( + client.transport, + transports.PredictionServiceGrpcTransport, + ) def test_prediction_service_base_transport_error(): @@ -1042,7 +1079,8 @@ def test_prediction_service_base_transport_with_credentials_file(): Transport.return_value = None load_creds.return_value = (credentials.AnonymousCredentials(), None) transport = transports.PredictionServiceTransport( - credentials_file="credentials.json", quota_project_id="octopus", + credentials_file="credentials.json", + quota_project_id="octopus", ) load_creds.assert_called_once_with( "credentials.json", @@ -1112,7 +1150,8 @@ def test_prediction_service_grpc_transport_channel(): # Check that channel is used if provided. transport = transports.PredictionServiceGrpcTransport( - host="squid.clam.whelk", channel=channel, + host="squid.clam.whelk", + channel=channel, ) assert transport.grpc_channel == channel assert transport._host == "squid.clam.whelk:443" @@ -1124,7 +1163,8 @@ def test_prediction_service_grpc_asyncio_transport_channel(): # Check that channel is used if provided. transport = transports.PredictionServiceGrpcAsyncIOTransport( - host="squid.clam.whelk", channel=channel, + host="squid.clam.whelk", + channel=channel, ) assert transport.grpc_channel == channel assert transport._host == "squid.clam.whelk:443" @@ -1225,7 +1265,9 @@ def test_endpoint_path(): endpoint = "whelk" expected = "projects/{project}/locations/{location}/endpoints/{endpoint}".format( - project=project, location=location, endpoint=endpoint, + project=project, + location=location, + endpoint=endpoint, ) actual = PredictionServiceClient.endpoint_path(project, location, endpoint) assert expected == actual @@ -1268,7 +1310,9 @@ def test_parse_common_billing_account_path(): def test_common_folder_path(): folder = "winkle" - expected = "folders/{folder}".format(folder=folder,) + expected = "folders/{folder}".format( + folder=folder, + ) actual = PredictionServiceClient.common_folder_path(folder) assert expected == actual @@ -1287,7 +1331,9 @@ def test_parse_common_folder_path(): def test_common_organization_path(): organization = "scallop" - expected = "organizations/{organization}".format(organization=organization,) + expected = "organizations/{organization}".format( + organization=organization, + ) actual = PredictionServiceClient.common_organization_path(organization) assert expected == actual @@ -1306,7 +1352,9 @@ def test_parse_common_organization_path(): def test_common_project_path(): project = "squid" - expected = "projects/{project}".format(project=project,) + expected = "projects/{project}".format( + project=project, + ) actual = PredictionServiceClient.common_project_path(project) assert expected == actual @@ -1327,7 +1375,8 @@ def test_common_location_path(): location = "octopus" expected = "projects/{project}/locations/{location}".format( - project=project, location=location, + 
project=project, + location=location, ) actual = PredictionServiceClient.common_location_path(project, location) assert expected == actual @@ -1352,7 +1401,8 @@ def test_client_withDEFAULT_CLIENT_INFO(): transports.PredictionServiceTransport, "_prep_wrapped_messages" ) as prep: client = PredictionServiceClient( - credentials=credentials.AnonymousCredentials(), client_info=client_info, + credentials=credentials.AnonymousCredentials(), + client_info=client_info, ) prep.assert_called_once_with(client_info) @@ -1361,6 +1411,7 @@ def test_client_withDEFAULT_CLIENT_INFO(): ) as prep: transport_class = PredictionServiceClient.get_transport_class() transport = transport_class( - credentials=credentials.AnonymousCredentials(), client_info=client_info, + credentials=credentials.AnonymousCredentials(), + client_info=client_info, ) prep.assert_called_once_with(client_info) diff --git a/tests/unit/gapic/aiplatform_v1beta1/test_specialist_pool_service.py b/tests/unit/gapic/aiplatform_v1beta1/test_specialist_pool_service.py index 6c1061d588..e08177ca7e 100644 --- a/tests/unit/gapic/aiplatform_v1beta1/test_specialist_pool_service.py +++ b/tests/unit/gapic/aiplatform_v1beta1/test_specialist_pool_service.py @@ -404,7 +404,9 @@ def test_specialist_pool_service_client_client_options_scopes( client_class, transport_class, transport_name ): # Check the case scopes are provided. - options = client_options.ClientOptions(scopes=["1", "2"],) + options = client_options.ClientOptions( + scopes=["1", "2"], + ) with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None client = client_class(client_options=options) @@ -477,7 +479,8 @@ def test_create_specialist_pool( request_type=specialist_pool_service.CreateSpecialistPoolRequest, ): client = SpecialistPoolServiceClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=credentials.AnonymousCredentials(), + transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -513,7 +516,8 @@ async def test_create_specialist_pool_async( request_type=specialist_pool_service.CreateSpecialistPoolRequest, ): client = SpecialistPoolServiceAsyncClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=credentials.AnonymousCredentials(), + transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -571,7 +575,10 @@ def test_create_specialist_pool_field_headers(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "parent=parent/value",) in kw["metadata"] + assert ( + "x-goog-request-params", + "parent=parent/value", + ) in kw["metadata"] @pytest.mark.asyncio @@ -602,7 +609,10 @@ async def test_create_specialist_pool_field_headers_async(): # Establish that the field header was sent. 
_, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "parent=parent/value",) in kw["metadata"] + assert ( + "x-goog-request-params", + "parent=parent/value", + ) in kw["metadata"] def test_create_specialist_pool_flattened(): @@ -707,7 +717,8 @@ def test_get_specialist_pool( request_type=specialist_pool_service.GetSpecialistPoolRequest, ): client = SpecialistPoolServiceClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=credentials.AnonymousCredentials(), + transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -760,7 +771,8 @@ async def test_get_specialist_pool_async( request_type=specialist_pool_service.GetSpecialistPoolRequest, ): client = SpecialistPoolServiceAsyncClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=credentials.AnonymousCredentials(), + transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -834,7 +846,10 @@ def test_get_specialist_pool_field_headers(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "name=name/value",) in kw["metadata"] + assert ( + "x-goog-request-params", + "name=name/value", + ) in kw["metadata"] @pytest.mark.asyncio @@ -865,7 +880,10 @@ async def test_get_specialist_pool_field_headers_async(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "name=name/value",) in kw["metadata"] + assert ( + "x-goog-request-params", + "name=name/value", + ) in kw["metadata"] def test_get_specialist_pool_flattened(): @@ -882,7 +900,9 @@ def test_get_specialist_pool_flattened(): # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - client.get_specialist_pool(name="name_value",) + client.get_specialist_pool( + name="name_value", + ) # Establish that the underlying call was made with the expected # request object values. @@ -901,7 +921,8 @@ def test_get_specialist_pool_flattened_error(): # fields is an error. with pytest.raises(ValueError): client.get_specialist_pool( - specialist_pool_service.GetSpecialistPoolRequest(), name="name_value", + specialist_pool_service.GetSpecialistPoolRequest(), + name="name_value", ) @@ -923,7 +944,9 @@ async def test_get_specialist_pool_flattened_async(): ) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - response = await client.get_specialist_pool(name="name_value",) + response = await client.get_specialist_pool( + name="name_value", + ) # Establish that the underlying call was made with the expected # request object values. @@ -943,7 +966,8 @@ async def test_get_specialist_pool_flattened_error_async(): # fields is an error. 
with pytest.raises(ValueError): await client.get_specialist_pool( - specialist_pool_service.GetSpecialistPoolRequest(), name="name_value", + specialist_pool_service.GetSpecialistPoolRequest(), + name="name_value", ) @@ -952,7 +976,8 @@ def test_list_specialist_pools( request_type=specialist_pool_service.ListSpecialistPoolsRequest, ): client = SpecialistPoolServiceClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=credentials.AnonymousCredentials(), + transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -993,7 +1018,8 @@ async def test_list_specialist_pools_async( request_type=specialist_pool_service.ListSpecialistPoolsRequest, ): client = SpecialistPoolServiceAsyncClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=credentials.AnonymousCredentials(), + transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -1055,7 +1081,10 @@ def test_list_specialist_pools_field_headers(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "parent=parent/value",) in kw["metadata"] + assert ( + "x-goog-request-params", + "parent=parent/value", + ) in kw["metadata"] @pytest.mark.asyncio @@ -1086,7 +1115,10 @@ async def test_list_specialist_pools_field_headers_async(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "parent=parent/value",) in kw["metadata"] + assert ( + "x-goog-request-params", + "parent=parent/value", + ) in kw["metadata"] def test_list_specialist_pools_flattened(): @@ -1103,7 +1135,9 @@ def test_list_specialist_pools_flattened(): # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - client.list_specialist_pools(parent="parent_value",) + client.list_specialist_pools( + parent="parent_value", + ) # Establish that the underlying call was made with the expected # request object values. @@ -1122,7 +1156,8 @@ def test_list_specialist_pools_flattened_error(): # fields is an error. with pytest.raises(ValueError): client.list_specialist_pools( - specialist_pool_service.ListSpecialistPoolsRequest(), parent="parent_value", + specialist_pool_service.ListSpecialistPoolsRequest(), + parent="parent_value", ) @@ -1144,7 +1179,9 @@ async def test_list_specialist_pools_flattened_async(): ) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - response = await client.list_specialist_pools(parent="parent_value",) + response = await client.list_specialist_pools( + parent="parent_value", + ) # Establish that the underlying call was made with the expected # request object values. @@ -1164,12 +1201,15 @@ async def test_list_specialist_pools_flattened_error_async(): # fields is an error. with pytest.raises(ValueError): await client.list_specialist_pools( - specialist_pool_service.ListSpecialistPoolsRequest(), parent="parent_value", + specialist_pool_service.ListSpecialistPoolsRequest(), + parent="parent_value", ) def test_list_specialist_pools_pager(): - client = SpecialistPoolServiceClient(credentials=credentials.AnonymousCredentials,) + client = SpecialistPoolServiceClient( + credentials=credentials.AnonymousCredentials, + ) # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object( @@ -1186,10 +1226,13 @@ def test_list_specialist_pools_pager(): next_page_token="abc", ), specialist_pool_service.ListSpecialistPoolsResponse( - specialist_pools=[], next_page_token="def", + specialist_pools=[], + next_page_token="def", ), specialist_pool_service.ListSpecialistPoolsResponse( - specialist_pools=[specialist_pool.SpecialistPool(),], + specialist_pools=[ + specialist_pool.SpecialistPool(), + ], next_page_token="ghi", ), specialist_pool_service.ListSpecialistPoolsResponse( @@ -1215,7 +1258,9 @@ def test_list_specialist_pools_pager(): def test_list_specialist_pools_pages(): - client = SpecialistPoolServiceClient(credentials=credentials.AnonymousCredentials,) + client = SpecialistPoolServiceClient( + credentials=credentials.AnonymousCredentials, + ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -1232,10 +1277,13 @@ def test_list_specialist_pools_pages(): next_page_token="abc", ), specialist_pool_service.ListSpecialistPoolsResponse( - specialist_pools=[], next_page_token="def", + specialist_pools=[], + next_page_token="def", ), specialist_pool_service.ListSpecialistPoolsResponse( - specialist_pools=[specialist_pool.SpecialistPool(),], + specialist_pools=[ + specialist_pool.SpecialistPool(), + ], next_page_token="ghi", ), specialist_pool_service.ListSpecialistPoolsResponse( @@ -1274,10 +1322,13 @@ async def test_list_specialist_pools_async_pager(): next_page_token="abc", ), specialist_pool_service.ListSpecialistPoolsResponse( - specialist_pools=[], next_page_token="def", + specialist_pools=[], + next_page_token="def", ), specialist_pool_service.ListSpecialistPoolsResponse( - specialist_pools=[specialist_pool.SpecialistPool(),], + specialist_pools=[ + specialist_pool.SpecialistPool(), + ], next_page_token="ghi", ), specialist_pool_service.ListSpecialistPoolsResponse( @@ -1288,7 +1339,9 @@ async def test_list_specialist_pools_async_pager(): ), RuntimeError, ) - async_pager = await client.list_specialist_pools(request={},) + async_pager = await client.list_specialist_pools( + request={}, + ) assert async_pager.next_page_token == "abc" responses = [] async for response in async_pager: @@ -1321,10 +1374,13 @@ async def test_list_specialist_pools_async_pages(): next_page_token="abc", ), specialist_pool_service.ListSpecialistPoolsResponse( - specialist_pools=[], next_page_token="def", + specialist_pools=[], + next_page_token="def", ), specialist_pool_service.ListSpecialistPoolsResponse( - specialist_pools=[specialist_pool.SpecialistPool(),], + specialist_pools=[ + specialist_pool.SpecialistPool(), + ], next_page_token="ghi", ), specialist_pool_service.ListSpecialistPoolsResponse( @@ -1347,7 +1403,8 @@ def test_delete_specialist_pool( request_type=specialist_pool_service.DeleteSpecialistPoolRequest, ): client = SpecialistPoolServiceClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=credentials.AnonymousCredentials(), + transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -1383,7 +1440,8 @@ async def test_delete_specialist_pool_async( request_type=specialist_pool_service.DeleteSpecialistPoolRequest, ): client = SpecialistPoolServiceAsyncClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=credentials.AnonymousCredentials(), + transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -1441,7 +1499,10 @@ def test_delete_specialist_pool_field_headers(): # 
Establish that the field header was sent. _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "name=name/value",) in kw["metadata"] + assert ( + "x-goog-request-params", + "name=name/value", + ) in kw["metadata"] @pytest.mark.asyncio @@ -1472,7 +1533,10 @@ async def test_delete_specialist_pool_field_headers_async(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "name=name/value",) in kw["metadata"] + assert ( + "x-goog-request-params", + "name=name/value", + ) in kw["metadata"] def test_delete_specialist_pool_flattened(): @@ -1489,7 +1553,9 @@ def test_delete_specialist_pool_flattened(): # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - client.delete_specialist_pool(name="name_value",) + client.delete_specialist_pool( + name="name_value", + ) # Establish that the underlying call was made with the expected # request object values. @@ -1508,7 +1574,8 @@ def test_delete_specialist_pool_flattened_error(): # fields is an error. with pytest.raises(ValueError): client.delete_specialist_pool( - specialist_pool_service.DeleteSpecialistPoolRequest(), name="name_value", + specialist_pool_service.DeleteSpecialistPoolRequest(), + name="name_value", ) @@ -1530,7 +1597,9 @@ async def test_delete_specialist_pool_flattened_async(): ) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - response = await client.delete_specialist_pool(name="name_value",) + response = await client.delete_specialist_pool( + name="name_value", + ) # Establish that the underlying call was made with the expected # request object values. @@ -1550,7 +1619,8 @@ async def test_delete_specialist_pool_flattened_error_async(): # fields is an error. with pytest.raises(ValueError): await client.delete_specialist_pool( - specialist_pool_service.DeleteSpecialistPoolRequest(), name="name_value", + specialist_pool_service.DeleteSpecialistPoolRequest(), + name="name_value", ) @@ -1559,7 +1629,8 @@ def test_update_specialist_pool( request_type=specialist_pool_service.UpdateSpecialistPoolRequest, ): client = SpecialistPoolServiceClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=credentials.AnonymousCredentials(), + transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -1595,7 +1666,8 @@ async def test_update_specialist_pool_async( request_type=specialist_pool_service.UpdateSpecialistPoolRequest, ): client = SpecialistPoolServiceAsyncClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=credentials.AnonymousCredentials(), + transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -1797,7 +1869,8 @@ def test_credentials_transport_error(): ) with pytest.raises(ValueError): client = SpecialistPoolServiceClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=credentials.AnonymousCredentials(), + transport=transport, ) # It is an error to provide a credentials file and a transport instance. 
@@ -1816,7 +1889,8 @@ def test_credentials_transport_error(): ) with pytest.raises(ValueError): client = SpecialistPoolServiceClient( - client_options={"scopes": ["1", "2"]}, transport=transport, + client_options={"scopes": ["1", "2"]}, + transport=transport, ) @@ -1864,7 +1938,10 @@ def test_transport_grpc_default(): client = SpecialistPoolServiceClient( credentials=credentials.AnonymousCredentials(), ) - assert isinstance(client.transport, transports.SpecialistPoolServiceGrpcTransport,) + assert isinstance( + client.transport, + transports.SpecialistPoolServiceGrpcTransport, + ) def test_specialist_pool_service_base_transport_error(): @@ -1915,7 +1992,8 @@ def test_specialist_pool_service_base_transport_with_credentials_file(): Transport.return_value = None load_creds.return_value = (credentials.AnonymousCredentials(), None) transport = transports.SpecialistPoolServiceTransport( - credentials_file="credentials.json", quota_project_id="octopus", + credentials_file="credentials.json", + quota_project_id="octopus", ) load_creds.assert_called_once_with( "credentials.json", @@ -1985,7 +2063,8 @@ def test_specialist_pool_service_grpc_transport_channel(): # Check that channel is used if provided. transport = transports.SpecialistPoolServiceGrpcTransport( - host="squid.clam.whelk", channel=channel, + host="squid.clam.whelk", + channel=channel, ) assert transport.grpc_channel == channel assert transport._host == "squid.clam.whelk:443" @@ -1997,7 +2076,8 @@ def test_specialist_pool_service_grpc_asyncio_transport_channel(): # Check that channel is used if provided. transport = transports.SpecialistPoolServiceGrpcAsyncIOTransport( - host="squid.clam.whelk", channel=channel, + host="squid.clam.whelk", + channel=channel, ) assert transport.grpc_channel == channel assert transport._host == "squid.clam.whelk:443" @@ -2094,12 +2174,16 @@ def test_specialist_pool_service_transport_channel_mtls_with_adc(transport_class def test_specialist_pool_service_grpc_lro_client(): client = SpecialistPoolServiceClient( - credentials=credentials.AnonymousCredentials(), transport="grpc", + credentials=credentials.AnonymousCredentials(), + transport="grpc", ) transport = client.transport # Ensure that we have a api-core operations client. - assert isinstance(transport.operations_client, operations_v1.OperationsClient,) + assert isinstance( + transport.operations_client, + operations_v1.OperationsClient, + ) # Ensure that subsequent calls to the property send the exact same object. assert transport.operations_client is transport.operations_client @@ -2107,12 +2191,16 @@ def test_specialist_pool_service_grpc_lro_client(): def test_specialist_pool_service_grpc_lro_async_client(): client = SpecialistPoolServiceAsyncClient( - credentials=credentials.AnonymousCredentials(), transport="grpc_asyncio", + credentials=credentials.AnonymousCredentials(), + transport="grpc_asyncio", ) transport = client.transport # Ensure that we have a api-core operations client. - assert isinstance(transport.operations_client, operations_v1.OperationsAsyncClient,) + assert isinstance( + transport.operations_client, + operations_v1.OperationsAsyncClient, + ) # Ensure that subsequent calls to the property send the exact same object. 
assert transport.operations_client is transport.operations_client @@ -2124,7 +2212,9 @@ def test_specialist_pool_path(): specialist_pool = "whelk" expected = "projects/{project}/locations/{location}/specialistPools/{specialist_pool}".format( - project=project, location=location, specialist_pool=specialist_pool, + project=project, + location=location, + specialist_pool=specialist_pool, ) actual = SpecialistPoolServiceClient.specialist_pool_path( project, location, specialist_pool @@ -2169,7 +2259,9 @@ def test_parse_common_billing_account_path(): def test_common_folder_path(): folder = "winkle" - expected = "folders/{folder}".format(folder=folder,) + expected = "folders/{folder}".format( + folder=folder, + ) actual = SpecialistPoolServiceClient.common_folder_path(folder) assert expected == actual @@ -2188,7 +2280,9 @@ def test_parse_common_folder_path(): def test_common_organization_path(): organization = "scallop" - expected = "organizations/{organization}".format(organization=organization,) + expected = "organizations/{organization}".format( + organization=organization, + ) actual = SpecialistPoolServiceClient.common_organization_path(organization) assert expected == actual @@ -2207,7 +2301,9 @@ def test_parse_common_organization_path(): def test_common_project_path(): project = "squid" - expected = "projects/{project}".format(project=project,) + expected = "projects/{project}".format( + project=project, + ) actual = SpecialistPoolServiceClient.common_project_path(project) assert expected == actual @@ -2228,7 +2324,8 @@ def test_common_location_path(): location = "octopus" expected = "projects/{project}/locations/{location}".format( - project=project, location=location, + project=project, + location=location, ) actual = SpecialistPoolServiceClient.common_location_path(project, location) assert expected == actual @@ -2253,7 +2350,8 @@ def test_client_withDEFAULT_CLIENT_INFO(): transports.SpecialistPoolServiceTransport, "_prep_wrapped_messages" ) as prep: client = SpecialistPoolServiceClient( - credentials=credentials.AnonymousCredentials(), client_info=client_info, + credentials=credentials.AnonymousCredentials(), + client_info=client_info, ) prep.assert_called_once_with(client_info) @@ -2262,6 +2360,7 @@ def test_client_withDEFAULT_CLIENT_INFO(): ) as prep: transport_class = SpecialistPoolServiceClient.get_transport_class() transport = transport_class( - credentials=credentials.AnonymousCredentials(), client_info=client_info, + credentials=credentials.AnonymousCredentials(), + client_info=client_info, ) prep.assert_called_once_with(client_info) From f79925ca2a086e46bc2e08599615d34a17b6a0a3 Mon Sep 17 00:00:00 2001 From: Eric Schmidt Date: Tue, 15 Dec 2020 15:20:24 -0800 Subject: [PATCH 2/5] fix: more lint --- .../services/dataset_service/transports/base.py | 8 ++++---- .../dataset_service/transports/grpc_asyncio.py | 8 ++++---- .../services/endpoint_service/transports/base.py | 8 ++++---- .../endpoint_service/transports/grpc_asyncio.py | 8 ++++---- .../services/job_service/transports/base.py | 8 ++++---- .../job_service/transports/grpc_asyncio.py | 8 ++++---- .../services/migration_service/transports/base.py | 8 ++++---- .../migration_service/transports/grpc_asyncio.py | 8 ++++---- .../services/model_service/transports/base.py | 8 ++++---- .../model_service/transports/grpc_asyncio.py | 8 ++++---- .../services/pipeline_service/transports/base.py | 8 ++++---- .../pipeline_service/transports/grpc_asyncio.py | 8 ++++---- .../prediction_service/transports/base.py | 8 ++++---- 
.../prediction_service/transports/grpc_asyncio.py | 8 ++++---- .../specialist_pool_service/transports/base.py | 8 ++++---- .../transports/grpc_asyncio.py | 8 ++++---- noxfile.py | 15 +++++++++++---- 17 files changed, 75 insertions(+), 68 deletions(-) diff --git a/google/cloud/aiplatform_v1beta1/services/dataset_service/transports/base.py b/google/cloud/aiplatform_v1beta1/services/dataset_service/transports/base.py index 56f567959a..583e9864cd 100644 --- a/google/cloud/aiplatform_v1beta1/services/dataset_service/transports/base.py +++ b/google/cloud/aiplatform_v1beta1/services/dataset_service/transports/base.py @@ -74,10 +74,10 @@ def __init__( scope (Optional[Sequence[str]]): A list of scopes. quota_project_id (Optional[str]): An optional project to use for billing and quota. - client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. If ``None``, then default info will be used. - Generally, you only need to set this if you're developing + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing your own client library. """ # Save the hostname. Default to port 443 (HTTPS) if none is specified. diff --git a/google/cloud/aiplatform_v1beta1/services/dataset_service/transports/grpc_asyncio.py b/google/cloud/aiplatform_v1beta1/services/dataset_service/transports/grpc_asyncio.py index 1f22b10f3e..aff766aa24 100644 --- a/google/cloud/aiplatform_v1beta1/services/dataset_service/transports/grpc_asyncio.py +++ b/google/cloud/aiplatform_v1beta1/services/dataset_service/transports/grpc_asyncio.py @@ -137,10 +137,10 @@ def __init__( for grpc channel. It is ignored if ``channel`` is provided. quota_project_id (Optional[str]): An optional project to use for billing and quota. - client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. If ``None``, then default info will be used. - Generally, you only need to set this if you're developing + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing your own client library. Raises: diff --git a/google/cloud/aiplatform_v1beta1/services/endpoint_service/transports/base.py b/google/cloud/aiplatform_v1beta1/services/endpoint_service/transports/base.py index e55589de8f..88b2b17c57 100644 --- a/google/cloud/aiplatform_v1beta1/services/endpoint_service/transports/base.py +++ b/google/cloud/aiplatform_v1beta1/services/endpoint_service/transports/base.py @@ -73,10 +73,10 @@ def __init__( scope (Optional[Sequence[str]]): A list of scopes. quota_project_id (Optional[str]): An optional project to use for billing and quota. - client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. If ``None``, then default info will be used. - Generally, you only need to set this if you're developing + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing your own client library. 
""" # Save the hostname. Default to port 443 (HTTPS) if none is specified. diff --git a/google/cloud/aiplatform_v1beta1/services/endpoint_service/transports/grpc_asyncio.py b/google/cloud/aiplatform_v1beta1/services/endpoint_service/transports/grpc_asyncio.py index f4e362281b..661c63e9b9 100644 --- a/google/cloud/aiplatform_v1beta1/services/endpoint_service/transports/grpc_asyncio.py +++ b/google/cloud/aiplatform_v1beta1/services/endpoint_service/transports/grpc_asyncio.py @@ -136,10 +136,10 @@ def __init__( for grpc channel. It is ignored if ``channel`` is provided. quota_project_id (Optional[str]): An optional project to use for billing and quota. - client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. If ``None``, then default info will be used. - Generally, you only need to set this if you're developing + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing your own client library. Raises: diff --git a/google/cloud/aiplatform_v1beta1/services/job_service/transports/base.py b/google/cloud/aiplatform_v1beta1/services/job_service/transports/base.py index 3d1f0be59b..abedda51f9 100644 --- a/google/cloud/aiplatform_v1beta1/services/job_service/transports/base.py +++ b/google/cloud/aiplatform_v1beta1/services/job_service/transports/base.py @@ -86,10 +86,10 @@ def __init__( scope (Optional[Sequence[str]]): A list of scopes. quota_project_id (Optional[str]): An optional project to use for billing and quota. - client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. If ``None``, then default info will be used. - Generally, you only need to set this if you're developing + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing your own client library. """ # Save the hostname. Default to port 443 (HTTPS) if none is specified. diff --git a/google/cloud/aiplatform_v1beta1/services/job_service/transports/grpc_asyncio.py b/google/cloud/aiplatform_v1beta1/services/job_service/transports/grpc_asyncio.py index 83cc826484..7d203c8d18 100644 --- a/google/cloud/aiplatform_v1beta1/services/job_service/transports/grpc_asyncio.py +++ b/google/cloud/aiplatform_v1beta1/services/job_service/transports/grpc_asyncio.py @@ -151,10 +151,10 @@ def __init__( for grpc channel. It is ignored if ``channel`` is provided. quota_project_id (Optional[str]): An optional project to use for billing and quota. - client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. If ``None``, then default info will be used. - Generally, you only need to set this if you're developing + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing your own client library. 
Raises: diff --git a/google/cloud/aiplatform_v1beta1/services/migration_service/transports/base.py b/google/cloud/aiplatform_v1beta1/services/migration_service/transports/base.py index cbcb288489..e5feef70cf 100644 --- a/google/cloud/aiplatform_v1beta1/services/migration_service/transports/base.py +++ b/google/cloud/aiplatform_v1beta1/services/migration_service/transports/base.py @@ -71,10 +71,10 @@ def __init__( scope (Optional[Sequence[str]]): A list of scopes. quota_project_id (Optional[str]): An optional project to use for billing and quota. - client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. If ``None``, then default info will be used. - Generally, you only need to set this if you're developing + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing your own client library. """ # Save the hostname. Default to port 443 (HTTPS) if none is specified. diff --git a/google/cloud/aiplatform_v1beta1/services/migration_service/transports/grpc_asyncio.py b/google/cloud/aiplatform_v1beta1/services/migration_service/transports/grpc_asyncio.py index ba038f57c5..969d1a3b12 100644 --- a/google/cloud/aiplatform_v1beta1/services/migration_service/transports/grpc_asyncio.py +++ b/google/cloud/aiplatform_v1beta1/services/migration_service/transports/grpc_asyncio.py @@ -138,10 +138,10 @@ def __init__( for grpc channel. It is ignored if ``channel`` is provided. quota_project_id (Optional[str]): An optional project to use for billing and quota. - client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. If ``None``, then default info will be used. - Generally, you only need to set this if you're developing + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing your own client library. Raises: diff --git a/google/cloud/aiplatform_v1beta1/services/model_service/transports/base.py b/google/cloud/aiplatform_v1beta1/services/model_service/transports/base.py index 2f87fc98dd..a0b896cdf4 100644 --- a/google/cloud/aiplatform_v1beta1/services/model_service/transports/base.py +++ b/google/cloud/aiplatform_v1beta1/services/model_service/transports/base.py @@ -75,10 +75,10 @@ def __init__( scope (Optional[Sequence[str]]): A list of scopes. quota_project_id (Optional[str]): An optional project to use for billing and quota. - client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. If ``None``, then default info will be used. - Generally, you only need to set this if you're developing + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing your own client library. """ # Save the hostname. Default to port 443 (HTTPS) if none is specified. 
diff --git a/google/cloud/aiplatform_v1beta1/services/model_service/transports/grpc_asyncio.py b/google/cloud/aiplatform_v1beta1/services/model_service/transports/grpc_asyncio.py index 13e9848290..bce1fed9a6 100644 --- a/google/cloud/aiplatform_v1beta1/services/model_service/transports/grpc_asyncio.py +++ b/google/cloud/aiplatform_v1beta1/services/model_service/transports/grpc_asyncio.py @@ -140,10 +140,10 @@ def __init__( for grpc channel. It is ignored if ``channel`` is provided. quota_project_id (Optional[str]): An optional project to use for billing and quota. - client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. If ``None``, then default info will be used. - Generally, you only need to set this if you're developing + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing your own client library. Raises: diff --git a/google/cloud/aiplatform_v1beta1/services/pipeline_service/transports/base.py b/google/cloud/aiplatform_v1beta1/services/pipeline_service/transports/base.py index 41123b8615..25e8acb412 100644 --- a/google/cloud/aiplatform_v1beta1/services/pipeline_service/transports/base.py +++ b/google/cloud/aiplatform_v1beta1/services/pipeline_service/transports/base.py @@ -76,10 +76,10 @@ def __init__( scope (Optional[Sequence[str]]): A list of scopes. quota_project_id (Optional[str]): An optional project to use for billing and quota. - client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. If ``None``, then default info will be used. - Generally, you only need to set this if you're developing + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing your own client library. """ # Save the hostname. Default to port 443 (HTTPS) if none is specified. diff --git a/google/cloud/aiplatform_v1beta1/services/pipeline_service/transports/grpc_asyncio.py b/google/cloud/aiplatform_v1beta1/services/pipeline_service/transports/grpc_asyncio.py index 2e6f51e1a3..173c771eab 100644 --- a/google/cloud/aiplatform_v1beta1/services/pipeline_service/transports/grpc_asyncio.py +++ b/google/cloud/aiplatform_v1beta1/services/pipeline_service/transports/grpc_asyncio.py @@ -141,10 +141,10 @@ def __init__( for grpc channel. It is ignored if ``channel`` is provided. quota_project_id (Optional[str]): An optional project to use for billing and quota. - client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. If ``None``, then default info will be used. - Generally, you only need to set this if you're developing + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing your own client library. 
Raises: diff --git a/google/cloud/aiplatform_v1beta1/services/prediction_service/transports/base.py b/google/cloud/aiplatform_v1beta1/services/prediction_service/transports/base.py index 0c82f7d83c..f2f7a028cc 100644 --- a/google/cloud/aiplatform_v1beta1/services/prediction_service/transports/base.py +++ b/google/cloud/aiplatform_v1beta1/services/prediction_service/transports/base.py @@ -69,10 +69,10 @@ def __init__( scope (Optional[Sequence[str]]): A list of scopes. quota_project_id (Optional[str]): An optional project to use for billing and quota. - client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. If ``None``, then default info will be used. - Generally, you only need to set this if you're developing + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing your own client library. """ # Save the hostname. Default to port 443 (HTTPS) if none is specified. diff --git a/google/cloud/aiplatform_v1beta1/services/prediction_service/transports/grpc_asyncio.py b/google/cloud/aiplatform_v1beta1/services/prediction_service/transports/grpc_asyncio.py index a0785007db..237fa8a75c 100644 --- a/google/cloud/aiplatform_v1beta1/services/prediction_service/transports/grpc_asyncio.py +++ b/google/cloud/aiplatform_v1beta1/services/prediction_service/transports/grpc_asyncio.py @@ -134,10 +134,10 @@ def __init__( for grpc channel. It is ignored if ``channel`` is provided. quota_project_id (Optional[str]): An optional project to use for billing and quota. - client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. If ``None``, then default info will be used. - Generally, you only need to set this if you're developing + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing your own client library. Raises: diff --git a/google/cloud/aiplatform_v1beta1/services/specialist_pool_service/transports/base.py b/google/cloud/aiplatform_v1beta1/services/specialist_pool_service/transports/base.py index f1af058030..a39c2f1f71 100644 --- a/google/cloud/aiplatform_v1beta1/services/specialist_pool_service/transports/base.py +++ b/google/cloud/aiplatform_v1beta1/services/specialist_pool_service/transports/base.py @@ -72,10 +72,10 @@ def __init__( scope (Optional[Sequence[str]]): A list of scopes. quota_project_id (Optional[str]): An optional project to use for billing and quota. - client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. If ``None``, then default info will be used. - Generally, you only need to set this if you're developing + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing your own client library. """ # Save the hostname. Default to port 443 (HTTPS) if none is specified. 
diff --git a/google/cloud/aiplatform_v1beta1/services/specialist_pool_service/transports/grpc_asyncio.py b/google/cloud/aiplatform_v1beta1/services/specialist_pool_service/transports/grpc_asyncio.py index 7d038edc4f..a6d3b045e6 100644 --- a/google/cloud/aiplatform_v1beta1/services/specialist_pool_service/transports/grpc_asyncio.py +++ b/google/cloud/aiplatform_v1beta1/services/specialist_pool_service/transports/grpc_asyncio.py @@ -142,10 +142,10 @@ def __init__( for grpc channel. It is ignored if ``channel`` is provided. quota_project_id (Optional[str]): An optional project to use for billing and quota. - client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. If ``None``, then default info will be used. - Generally, you only need to set this if you're developing + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing your own client library. Raises: diff --git a/noxfile.py b/noxfile.py index 87765339b5..295cac5eb5 100644 --- a/noxfile.py +++ b/noxfile.py @@ -40,7 +40,9 @@ def lint(session): """ session.install("flake8", BLACK_VERSION) session.run( - "black", "--check", *BLACK_PATHS, + "black", + "--check", + *BLACK_PATHS, ) session.run("flake8", "google", "tests") @@ -57,7 +59,8 @@ def blacken(session): """ session.install(BLACK_VERSION) session.run( - "black", *BLACK_PATHS, + "black", + *BLACK_PATHS, ) @@ -73,7 +76,9 @@ def default(session): session.install("asyncmock", "pytest-asyncio") session.install( - "mock", "pytest", "pytest-cov", + "mock", + "pytest", + "pytest-cov", ) session.install("-e", ".") @@ -123,7 +128,9 @@ def system(session): # Install all test dependencies, then install this package into the # virtualenv's dist-packages. session.install( - "mock", "pytest", "google-cloud-testutils", + "mock", + "pytest", + "google-cloud-testutils", ) session.install("-e", ".") From fccb0e4e8cced320657262c45f5ff56dff5e1016 Mon Sep 17 00:00:00 2001 From: Eric Schmidt Date: Tue, 15 Dec 2020 15:54:34 -0800 Subject: [PATCH 3/5] fix: lint --- .../dataset_service/transports/base.py | 40 ++++++++++++++----- .../endpoint_service/transports/base.py | 28 +++++++++---- .../services/job_service/transports/base.py | 20 +++++++--- .../services/model_service/transports/base.py | 28 +++++++++---- .../prediction_service/transports/base.py | 8 +++- .../transports/base.py | 4 +- 6 files changed, 96 insertions(+), 32 deletions(-) diff --git a/google/cloud/aiplatform_v1beta1/services/dataset_service/transports/base.py b/google/cloud/aiplatform_v1beta1/services/dataset_service/transports/base.py index 583e9864cd..6d7a5dc0c3 100644 --- a/google/cloud/aiplatform_v1beta1/services/dataset_service/transports/base.py +++ b/google/cloud/aiplatform_v1beta1/services/dataset_service/transports/base.py @@ -112,34 +112,54 @@ def _prep_wrapped_messages(self, client_info): # Precompute the wrapped methods. 
self._wrapped_methods = { self.create_dataset: gapic_v1.method.wrap_method( - self.create_dataset, default_timeout=5.0, client_info=client_info, + self.create_dataset, + default_timeout=5.0, + client_info=client_info, ), self.get_dataset: gapic_v1.method.wrap_method( - self.get_dataset, default_timeout=5.0, client_info=client_info, + self.get_dataset, + default_timeout=5.0, + client_info=client_info, ), self.update_dataset: gapic_v1.method.wrap_method( - self.update_dataset, default_timeout=5.0, client_info=client_info, + self.update_dataset, + default_timeout=5.0, + client_info=client_info, ), self.list_datasets: gapic_v1.method.wrap_method( - self.list_datasets, default_timeout=5.0, client_info=client_info, + self.list_datasets, + default_timeout=5.0, + client_info=client_info, ), self.delete_dataset: gapic_v1.method.wrap_method( - self.delete_dataset, default_timeout=5.0, client_info=client_info, + self.delete_dataset, + default_timeout=5.0, + client_info=client_info, ), self.import_data: gapic_v1.method.wrap_method( - self.import_data, default_timeout=5.0, client_info=client_info, + self.import_data, + default_timeout=5.0, + client_info=client_info, ), self.export_data: gapic_v1.method.wrap_method( - self.export_data, default_timeout=5.0, client_info=client_info, + self.export_data, + default_timeout=5.0, + client_info=client_info, ), self.list_data_items: gapic_v1.method.wrap_method( - self.list_data_items, default_timeout=5.0, client_info=client_info, + self.list_data_items, + default_timeout=5.0, + client_info=client_info, ), self.get_annotation_spec: gapic_v1.method.wrap_method( - self.get_annotation_spec, default_timeout=5.0, client_info=client_info, + self.get_annotation_spec, + default_timeout=5.0, + client_info=client_info, ), self.list_annotations: gapic_v1.method.wrap_method( - self.list_annotations, default_timeout=5.0, client_info=client_info, + self.list_annotations, + default_timeout=5.0, + client_info=client_info, ), } diff --git a/google/cloud/aiplatform_v1beta1/services/endpoint_service/transports/base.py b/google/cloud/aiplatform_v1beta1/services/endpoint_service/transports/base.py index 88b2b17c57..8608b11624 100644 --- a/google/cloud/aiplatform_v1beta1/services/endpoint_service/transports/base.py +++ b/google/cloud/aiplatform_v1beta1/services/endpoint_service/transports/base.py @@ -111,25 +111,39 @@ def _prep_wrapped_messages(self, client_info): # Precompute the wrapped methods. 
self._wrapped_methods = { self.create_endpoint: gapic_v1.method.wrap_method( - self.create_endpoint, default_timeout=5.0, client_info=client_info, + self.create_endpoint, + default_timeout=5.0, + client_info=client_info, ), self.get_endpoint: gapic_v1.method.wrap_method( - self.get_endpoint, default_timeout=5.0, client_info=client_info, + self.get_endpoint, + default_timeout=5.0, + client_info=client_info, ), self.list_endpoints: gapic_v1.method.wrap_method( - self.list_endpoints, default_timeout=5.0, client_info=client_info, + self.list_endpoints, + default_timeout=5.0, + client_info=client_info, ), self.update_endpoint: gapic_v1.method.wrap_method( - self.update_endpoint, default_timeout=5.0, client_info=client_info, + self.update_endpoint, + default_timeout=5.0, + client_info=client_info, ), self.delete_endpoint: gapic_v1.method.wrap_method( - self.delete_endpoint, default_timeout=5.0, client_info=client_info, + self.delete_endpoint, + default_timeout=5.0, + client_info=client_info, ), self.deploy_model: gapic_v1.method.wrap_method( - self.deploy_model, default_timeout=5.0, client_info=client_info, + self.deploy_model, + default_timeout=5.0, + client_info=client_info, ), self.undeploy_model: gapic_v1.method.wrap_method( - self.undeploy_model, default_timeout=5.0, client_info=client_info, + self.undeploy_model, + default_timeout=5.0, + client_info=client_info, ), } diff --git a/google/cloud/aiplatform_v1beta1/services/job_service/transports/base.py b/google/cloud/aiplatform_v1beta1/services/job_service/transports/base.py index abedda51f9..6ac6330d01 100644 --- a/google/cloud/aiplatform_v1beta1/services/job_service/transports/base.py +++ b/google/cloud/aiplatform_v1beta1/services/job_service/transports/base.py @@ -124,19 +124,29 @@ def _prep_wrapped_messages(self, client_info): # Precompute the wrapped methods. self._wrapped_methods = { self.create_custom_job: gapic_v1.method.wrap_method( - self.create_custom_job, default_timeout=5.0, client_info=client_info, + self.create_custom_job, + default_timeout=5.0, + client_info=client_info, ), self.get_custom_job: gapic_v1.method.wrap_method( - self.get_custom_job, default_timeout=5.0, client_info=client_info, + self.get_custom_job, + default_timeout=5.0, + client_info=client_info, ), self.list_custom_jobs: gapic_v1.method.wrap_method( - self.list_custom_jobs, default_timeout=5.0, client_info=client_info, + self.list_custom_jobs, + default_timeout=5.0, + client_info=client_info, ), self.delete_custom_job: gapic_v1.method.wrap_method( - self.delete_custom_job, default_timeout=5.0, client_info=client_info, + self.delete_custom_job, + default_timeout=5.0, + client_info=client_info, ), self.cancel_custom_job: gapic_v1.method.wrap_method( - self.cancel_custom_job, default_timeout=5.0, client_info=client_info, + self.cancel_custom_job, + default_timeout=5.0, + client_info=client_info, ), self.create_data_labeling_job: gapic_v1.method.wrap_method( self.create_data_labeling_job, diff --git a/google/cloud/aiplatform_v1beta1/services/model_service/transports/base.py b/google/cloud/aiplatform_v1beta1/services/model_service/transports/base.py index a0b896cdf4..17e7a98018 100644 --- a/google/cloud/aiplatform_v1beta1/services/model_service/transports/base.py +++ b/google/cloud/aiplatform_v1beta1/services/model_service/transports/base.py @@ -113,25 +113,39 @@ def _prep_wrapped_messages(self, client_info): # Precompute the wrapped methods. 
self._wrapped_methods = { self.upload_model: gapic_v1.method.wrap_method( - self.upload_model, default_timeout=5.0, client_info=client_info, + self.upload_model, + default_timeout=5.0, + client_info=client_info, ), self.get_model: gapic_v1.method.wrap_method( - self.get_model, default_timeout=5.0, client_info=client_info, + self.get_model, + default_timeout=5.0, + client_info=client_info, ), self.list_models: gapic_v1.method.wrap_method( - self.list_models, default_timeout=5.0, client_info=client_info, + self.list_models, + default_timeout=5.0, + client_info=client_info, ), self.update_model: gapic_v1.method.wrap_method( - self.update_model, default_timeout=5.0, client_info=client_info, + self.update_model, + default_timeout=5.0, + client_info=client_info, ), self.delete_model: gapic_v1.method.wrap_method( - self.delete_model, default_timeout=5.0, client_info=client_info, + self.delete_model, + default_timeout=5.0, + client_info=client_info, ), self.export_model: gapic_v1.method.wrap_method( - self.export_model, default_timeout=5.0, client_info=client_info, + self.export_model, + default_timeout=5.0, + client_info=client_info, ), self.get_model_evaluation: gapic_v1.method.wrap_method( - self.get_model_evaluation, default_timeout=5.0, client_info=client_info, + self.get_model_evaluation, + default_timeout=5.0, + client_info=client_info, ), self.list_model_evaluations: gapic_v1.method.wrap_method( self.list_model_evaluations, diff --git a/google/cloud/aiplatform_v1beta1/services/prediction_service/transports/base.py b/google/cloud/aiplatform_v1beta1/services/prediction_service/transports/base.py index f2f7a028cc..739153f493 100644 --- a/google/cloud/aiplatform_v1beta1/services/prediction_service/transports/base.py +++ b/google/cloud/aiplatform_v1beta1/services/prediction_service/transports/base.py @@ -107,10 +107,14 @@ def _prep_wrapped_messages(self, client_info): # Precompute the wrapped methods. 
self._wrapped_methods = { self.predict: gapic_v1.method.wrap_method( - self.predict, default_timeout=5.0, client_info=client_info, + self.predict, + default_timeout=5.0, + client_info=client_info, ), self.explain: gapic_v1.method.wrap_method( - self.explain, default_timeout=5.0, client_info=client_info, + self.explain, + default_timeout=5.0, + client_info=client_info, ), } diff --git a/google/cloud/aiplatform_v1beta1/services/specialist_pool_service/transports/base.py b/google/cloud/aiplatform_v1beta1/services/specialist_pool_service/transports/base.py index a39c2f1f71..e4de291be3 100644 --- a/google/cloud/aiplatform_v1beta1/services/specialist_pool_service/transports/base.py +++ b/google/cloud/aiplatform_v1beta1/services/specialist_pool_service/transports/base.py @@ -115,7 +115,9 @@ def _prep_wrapped_messages(self, client_info): client_info=client_info, ), self.get_specialist_pool: gapic_v1.method.wrap_method( - self.get_specialist_pool, default_timeout=5.0, client_info=client_info, + self.get_specialist_pool, + default_timeout=5.0, + client_info=client_info, ), self.list_specialist_pools: gapic_v1.method.wrap_method( self.list_specialist_pools, From 2ae075cc94adf77decf20e0d04e0cf1c77a249fb Mon Sep 17 00:00:00 2001 From: Bu Sun Kim Date: Wed, 16 Dec 2020 00:13:02 +0000 Subject: [PATCH 4/5] chore: blacken --- docs/conf.py | 5 +- .../types/image_classification.py | 4 +- .../types/image_object_detection.py | 4 +- .../types/image_segmentation.py | 4 +- .../types/text_classification.py | 4 +- .../instance_v1beta1/types/text_extraction.py | 4 +- .../instance_v1beta1/types/text_sentiment.py | 4 +- .../types/video_action_recognition.py | 4 +- .../types/video_classification.py | 4 +- .../types/video_object_tracking.py | 4 +- .../types/image_classification.py | 4 +- .../types/image_object_detection.py | 4 +- .../types/image_segmentation.py | 4 +- .../types/video_action_recognition.py | 4 +- .../types/video_classification.py | 4 +- .../types/video_object_tracking.py | 4 +- .../types/classification.py | 4 +- .../types/image_object_detection.py | 10 +- .../types/image_segmentation.py | 4 +- .../types/tabular_classification.py | 4 +- .../types/tabular_regression.py | 4 +- .../types/text_extraction.py | 4 +- .../types/text_sentiment.py | 10 +- .../types/time_series_forecasting.py | 4 +- .../types/video_action_recognition.py | 20 +- .../types/video_classification.py | 20 +- .../types/video_object_tracking.py | 64 +- .../types/automl_forecasting.py | 20 +- .../types/automl_image_classification.py | 18 +- .../types/automl_image_object_detection.py | 18 +- .../types/automl_image_segmentation.py | 18 +- .../definition_v1beta1/types/automl_tables.py | 22 +- .../types/automl_text_classification.py | 9 +- .../types/automl_text_extraction.py | 11 +- .../types/automl_text_sentiment.py | 11 +- .../types/automl_video_action_recognition.py | 15 +- .../types/automl_video_classification.py | 15 +- .../types/automl_video_object_tracking.py | 15 +- .../export_evaluated_data_items_config.py | 4 +- .../services/dataset_service/async_client.py | 85 +- .../services/dataset_service/client.py | 157 +- .../dataset_service/transports/base.py | 40 +- .../services/endpoint_service/async_client.py | 54 +- .../services/endpoint_service/client.py | 115 +- .../endpoint_service/transports/base.py | 28 +- .../services/job_service/async_client.py | 152 +- .../services/job_service/client.py | 239 +-- .../services/job_service/transports/base.py | 20 +- .../migration_service/async_client.py | 19 +- .../services/migration_service/client.py 
| 126 +- .../services/model_service/async_client.py | 85 +- .../services/model_service/client.py | 170 +-- .../services/model_service/transports/base.py | 28 +- .../services/pipeline_service/async_client.py | 38 +- .../services/pipeline_service/client.py | 107 +- .../prediction_service/async_client.py | 14 +- .../services/prediction_service/client.py | 63 +- .../prediction_service/transports/base.py | 8 +- .../specialist_pool_service/async_client.py | 40 +- .../specialist_pool_service/client.py | 89 +- .../transports/base.py | 4 +- .../types/accelerator_type.py | 5 +- .../aiplatform_v1beta1/types/annotation.py | 27 +- .../types/annotation_spec.py | 19 +- .../types/batch_prediction_job.py | 100 +- .../types/completion_stats.py | 5 +- .../aiplatform_v1beta1/types/custom_job.py | 80 +- .../aiplatform_v1beta1/types/data_item.py | 23 +- .../types/data_labeling_job.py | 58 +- .../cloud/aiplatform_v1beta1/types/dataset.py | 34 +- .../types/dataset_service.py | 80 +- .../types/deployed_model_ref.py | 5 +- .../aiplatform_v1beta1/types/endpoint.py | 32 +- .../types/endpoint_service.py | 48 +- .../cloud/aiplatform_v1beta1/types/env_var.py | 5 +- .../aiplatform_v1beta1/types/explanation.py | 54 +- .../types/explanation_metadata.py | 32 +- .../types/hyperparameter_tuning_job.py | 57 +- .../aiplatform_v1beta1/types/job_service.py | 50 +- .../aiplatform_v1beta1/types/job_state.py | 5 +- .../types/machine_resources.py | 16 +- .../types/manual_batch_tuning_parameters.py | 4 +- .../types/migratable_resource.py | 33 +- .../types/migration_service.py | 30 +- .../cloud/aiplatform_v1beta1/types/model.py | 69 +- .../types/model_evaluation.py | 21 +- .../types/model_evaluation_slice.py | 25 +- .../aiplatform_v1beta1/types/model_service.py | 78 +- .../aiplatform_v1beta1/types/operation.py | 25 +- .../types/pipeline_service.py | 14 +- .../types/pipeline_state.py | 5 +- .../types/prediction_service.py | 40 +- .../types/specialist_pool.py | 5 +- .../types/specialist_pool_service.py | 32 +- .../cloud/aiplatform_v1beta1/types/study.py | 88 +- .../types/training_pipeline.py | 90 +- .../types/user_action_reference.py | 5 +- noxfile.py | 15 +- .../test_dataset_service.py | 824 +++------- .../test_endpoint_service.py | 477 ++---- .../aiplatform_v1beta1/test_job_service.py | 1323 +++++------------ .../test_migration_service.py | 210 +-- .../aiplatform_v1beta1/test_model_service.py | 788 +++------- .../test_pipeline_service.py | 381 ++--- .../test_prediction_service.py | 109 +- .../test_specialist_pool_service.py | 217 +-- 106 files changed, 1770 insertions(+), 5943 deletions(-) diff --git a/docs/conf.py b/docs/conf.py index a6e4da0270..98e68be241 100644 --- a/docs/conf.py +++ b/docs/conf.py @@ -347,10 +347,7 @@ intersphinx_mapping = { "python": ("https://python.readthedocs.org/en/latest/", None), "google-auth": ("https://googleapis.dev/python/google-auth/latest/", None), - "google.api_core": ( - "https://googleapis.dev/python/google-api-core/latest/", - None, - ), + "google.api_core": ("https://googleapis.dev/python/google-api-core/latest/", None,), "grpc": ("https://grpc.github.io/grpc/python/", None), "proto-plus": ("https://proto-plus-python.readthedocs.io/en/latest/", None), } diff --git a/google/cloud/aiplatform/v1beta1/schema/predict/instance_v1beta1/types/image_classification.py b/google/cloud/aiplatform/v1beta1/schema/predict/instance_v1beta1/types/image_classification.py index c484150e69..84b1ef0bbe 100644 --- a/google/cloud/aiplatform/v1beta1/schema/predict/instance_v1beta1/types/image_classification.py +++ 
b/google/cloud/aiplatform/v1beta1/schema/predict/instance_v1beta1/types/image_classification.py @@ -20,9 +20,7 @@ __protobuf__ = proto.module( package="google.cloud.aiplatform.v1beta1.schema.predict.instance", - manifest={ - "ImageClassificationPredictionInstance", - }, + manifest={"ImageClassificationPredictionInstance",}, ) diff --git a/google/cloud/aiplatform/v1beta1/schema/predict/instance_v1beta1/types/image_object_detection.py b/google/cloud/aiplatform/v1beta1/schema/predict/instance_v1beta1/types/image_object_detection.py index 8455fa581c..79c3efc2c6 100644 --- a/google/cloud/aiplatform/v1beta1/schema/predict/instance_v1beta1/types/image_object_detection.py +++ b/google/cloud/aiplatform/v1beta1/schema/predict/instance_v1beta1/types/image_object_detection.py @@ -20,9 +20,7 @@ __protobuf__ = proto.module( package="google.cloud.aiplatform.v1beta1.schema.predict.instance", - manifest={ - "ImageObjectDetectionPredictionInstance", - }, + manifest={"ImageObjectDetectionPredictionInstance",}, ) diff --git a/google/cloud/aiplatform/v1beta1/schema/predict/instance_v1beta1/types/image_segmentation.py b/google/cloud/aiplatform/v1beta1/schema/predict/instance_v1beta1/types/image_segmentation.py index 497b67b691..5a3232c6d2 100644 --- a/google/cloud/aiplatform/v1beta1/schema/predict/instance_v1beta1/types/image_segmentation.py +++ b/google/cloud/aiplatform/v1beta1/schema/predict/instance_v1beta1/types/image_segmentation.py @@ -20,9 +20,7 @@ __protobuf__ = proto.module( package="google.cloud.aiplatform.v1beta1.schema.predict.instance", - manifest={ - "ImageSegmentationPredictionInstance", - }, + manifest={"ImageSegmentationPredictionInstance",}, ) diff --git a/google/cloud/aiplatform/v1beta1/schema/predict/instance_v1beta1/types/text_classification.py b/google/cloud/aiplatform/v1beta1/schema/predict/instance_v1beta1/types/text_classification.py index 4f196ac220..a615dc7e49 100644 --- a/google/cloud/aiplatform/v1beta1/schema/predict/instance_v1beta1/types/text_classification.py +++ b/google/cloud/aiplatform/v1beta1/schema/predict/instance_v1beta1/types/text_classification.py @@ -20,9 +20,7 @@ __protobuf__ = proto.module( package="google.cloud.aiplatform.v1beta1.schema.predict.instance", - manifest={ - "TextClassificationPredictionInstance", - }, + manifest={"TextClassificationPredictionInstance",}, ) diff --git a/google/cloud/aiplatform/v1beta1/schema/predict/instance_v1beta1/types/text_extraction.py b/google/cloud/aiplatform/v1beta1/schema/predict/instance_v1beta1/types/text_extraction.py index 1077f8b8d7..c6fecf80b7 100644 --- a/google/cloud/aiplatform/v1beta1/schema/predict/instance_v1beta1/types/text_extraction.py +++ b/google/cloud/aiplatform/v1beta1/schema/predict/instance_v1beta1/types/text_extraction.py @@ -20,9 +20,7 @@ __protobuf__ = proto.module( package="google.cloud.aiplatform.v1beta1.schema.predict.instance", - manifest={ - "TextExtractionPredictionInstance", - }, + manifest={"TextExtractionPredictionInstance",}, ) diff --git a/google/cloud/aiplatform/v1beta1/schema/predict/instance_v1beta1/types/text_sentiment.py b/google/cloud/aiplatform/v1beta1/schema/predict/instance_v1beta1/types/text_sentiment.py index 00bd62fdeb..69836d0e96 100644 --- a/google/cloud/aiplatform/v1beta1/schema/predict/instance_v1beta1/types/text_sentiment.py +++ b/google/cloud/aiplatform/v1beta1/schema/predict/instance_v1beta1/types/text_sentiment.py @@ -20,9 +20,7 @@ __protobuf__ = proto.module( package="google.cloud.aiplatform.v1beta1.schema.predict.instance", - manifest={ - "TextSentimentPredictionInstance", - 
}, + manifest={"TextSentimentPredictionInstance",}, ) diff --git a/google/cloud/aiplatform/v1beta1/schema/predict/instance_v1beta1/types/video_action_recognition.py b/google/cloud/aiplatform/v1beta1/schema/predict/instance_v1beta1/types/video_action_recognition.py index 0e6d5afd6e..89be6318f8 100644 --- a/google/cloud/aiplatform/v1beta1/schema/predict/instance_v1beta1/types/video_action_recognition.py +++ b/google/cloud/aiplatform/v1beta1/schema/predict/instance_v1beta1/types/video_action_recognition.py @@ -20,9 +20,7 @@ __protobuf__ = proto.module( package="google.cloud.aiplatform.v1beta1.schema.predict.instance", - manifest={ - "VideoActionRecognitionPredictionInstance", - }, + manifest={"VideoActionRecognitionPredictionInstance",}, ) diff --git a/google/cloud/aiplatform/v1beta1/schema/predict/instance_v1beta1/types/video_classification.py b/google/cloud/aiplatform/v1beta1/schema/predict/instance_v1beta1/types/video_classification.py index 32c0dff2f7..41ab3bc217 100644 --- a/google/cloud/aiplatform/v1beta1/schema/predict/instance_v1beta1/types/video_classification.py +++ b/google/cloud/aiplatform/v1beta1/schema/predict/instance_v1beta1/types/video_classification.py @@ -20,9 +20,7 @@ __protobuf__ = proto.module( package="google.cloud.aiplatform.v1beta1.schema.predict.instance", - manifest={ - "VideoClassificationPredictionInstance", - }, + manifest={"VideoClassificationPredictionInstance",}, ) diff --git a/google/cloud/aiplatform/v1beta1/schema/predict/instance_v1beta1/types/video_object_tracking.py b/google/cloud/aiplatform/v1beta1/schema/predict/instance_v1beta1/types/video_object_tracking.py index 4c6d0714bb..3729c14816 100644 --- a/google/cloud/aiplatform/v1beta1/schema/predict/instance_v1beta1/types/video_object_tracking.py +++ b/google/cloud/aiplatform/v1beta1/schema/predict/instance_v1beta1/types/video_object_tracking.py @@ -20,9 +20,7 @@ __protobuf__ = proto.module( package="google.cloud.aiplatform.v1beta1.schema.predict.instance", - manifest={ - "VideoObjectTrackingPredictionInstance", - }, + manifest={"VideoObjectTrackingPredictionInstance",}, ) diff --git a/google/cloud/aiplatform/v1beta1/schema/predict/params_v1beta1/types/image_classification.py b/google/cloud/aiplatform/v1beta1/schema/predict/params_v1beta1/types/image_classification.py index b8deb2a0c6..681a8c3d87 100644 --- a/google/cloud/aiplatform/v1beta1/schema/predict/params_v1beta1/types/image_classification.py +++ b/google/cloud/aiplatform/v1beta1/schema/predict/params_v1beta1/types/image_classification.py @@ -20,9 +20,7 @@ __protobuf__ = proto.module( package="google.cloud.aiplatform.v1beta1.schema.predict.params", - manifest={ - "ImageClassificationPredictionParams", - }, + manifest={"ImageClassificationPredictionParams",}, ) diff --git a/google/cloud/aiplatform/v1beta1/schema/predict/params_v1beta1/types/image_object_detection.py b/google/cloud/aiplatform/v1beta1/schema/predict/params_v1beta1/types/image_object_detection.py index 13bf3059b9..146dd324b7 100644 --- a/google/cloud/aiplatform/v1beta1/schema/predict/params_v1beta1/types/image_object_detection.py +++ b/google/cloud/aiplatform/v1beta1/schema/predict/params_v1beta1/types/image_object_detection.py @@ -20,9 +20,7 @@ __protobuf__ = proto.module( package="google.cloud.aiplatform.v1beta1.schema.predict.params", - manifest={ - "ImageObjectDetectionPredictionParams", - }, + manifest={"ImageObjectDetectionPredictionParams",}, ) diff --git a/google/cloud/aiplatform/v1beta1/schema/predict/params_v1beta1/types/image_segmentation.py 
b/google/cloud/aiplatform/v1beta1/schema/predict/params_v1beta1/types/image_segmentation.py index 3e24237e86..aa11739a61 100644 --- a/google/cloud/aiplatform/v1beta1/schema/predict/params_v1beta1/types/image_segmentation.py +++ b/google/cloud/aiplatform/v1beta1/schema/predict/params_v1beta1/types/image_segmentation.py @@ -20,9 +20,7 @@ __protobuf__ = proto.module( package="google.cloud.aiplatform.v1beta1.schema.predict.params", - manifest={ - "ImageSegmentationPredictionParams", - }, + manifest={"ImageSegmentationPredictionParams",}, ) diff --git a/google/cloud/aiplatform/v1beta1/schema/predict/params_v1beta1/types/video_action_recognition.py b/google/cloud/aiplatform/v1beta1/schema/predict/params_v1beta1/types/video_action_recognition.py index 7d8d6e1a82..c1f8f9f3bc 100644 --- a/google/cloud/aiplatform/v1beta1/schema/predict/params_v1beta1/types/video_action_recognition.py +++ b/google/cloud/aiplatform/v1beta1/schema/predict/params_v1beta1/types/video_action_recognition.py @@ -20,9 +20,7 @@ __protobuf__ = proto.module( package="google.cloud.aiplatform.v1beta1.schema.predict.params", - manifest={ - "VideoActionRecognitionPredictionParams", - }, + manifest={"VideoActionRecognitionPredictionParams",}, ) diff --git a/google/cloud/aiplatform/v1beta1/schema/predict/params_v1beta1/types/video_classification.py b/google/cloud/aiplatform/v1beta1/schema/predict/params_v1beta1/types/video_classification.py index 80149d426b..1b8d84a7d1 100644 --- a/google/cloud/aiplatform/v1beta1/schema/predict/params_v1beta1/types/video_classification.py +++ b/google/cloud/aiplatform/v1beta1/schema/predict/params_v1beta1/types/video_classification.py @@ -20,9 +20,7 @@ __protobuf__ = proto.module( package="google.cloud.aiplatform.v1beta1.schema.predict.params", - manifest={ - "VideoClassificationPredictionParams", - }, + manifest={"VideoClassificationPredictionParams",}, ) diff --git a/google/cloud/aiplatform/v1beta1/schema/predict/params_v1beta1/types/video_object_tracking.py b/google/cloud/aiplatform/v1beta1/schema/predict/params_v1beta1/types/video_object_tracking.py index 8aa3ff8384..4c0b6846bc 100644 --- a/google/cloud/aiplatform/v1beta1/schema/predict/params_v1beta1/types/video_object_tracking.py +++ b/google/cloud/aiplatform/v1beta1/schema/predict/params_v1beta1/types/video_object_tracking.py @@ -20,9 +20,7 @@ __protobuf__ = proto.module( package="google.cloud.aiplatform.v1beta1.schema.predict.params", - manifest={ - "VideoObjectTrackingPredictionParams", - }, + manifest={"VideoObjectTrackingPredictionParams",}, ) diff --git a/google/cloud/aiplatform/v1beta1/schema/predict/prediction_v1beta1/types/classification.py b/google/cloud/aiplatform/v1beta1/schema/predict/prediction_v1beta1/types/classification.py index 850779b6b7..3bfe82f64e 100644 --- a/google/cloud/aiplatform/v1beta1/schema/predict/prediction_v1beta1/types/classification.py +++ b/google/cloud/aiplatform/v1beta1/schema/predict/prediction_v1beta1/types/classification.py @@ -20,9 +20,7 @@ __protobuf__ = proto.module( package="google.cloud.aiplatform.v1beta1.schema.predict.prediction", - manifest={ - "ClassificationPredictionResult", - }, + manifest={"ClassificationPredictionResult",}, ) diff --git a/google/cloud/aiplatform/v1beta1/schema/predict/prediction_v1beta1/types/image_object_detection.py b/google/cloud/aiplatform/v1beta1/schema/predict/prediction_v1beta1/types/image_object_detection.py index 08cd977503..1bf5002c2a 100644 --- a/google/cloud/aiplatform/v1beta1/schema/predict/prediction_v1beta1/types/image_object_detection.py +++ 
b/google/cloud/aiplatform/v1beta1/schema/predict/prediction_v1beta1/types/image_object_detection.py @@ -23,9 +23,7 @@ __protobuf__ = proto.module( package="google.cloud.aiplatform.v1beta1.schema.predict.prediction", - manifest={ - "ImageObjectDetectionPredictionResult", - }, + manifest={"ImageObjectDetectionPredictionResult",}, ) @@ -60,11 +58,7 @@ class ImageObjectDetectionPredictionResult(proto.Message): confidences = proto.RepeatedField(proto.FLOAT, number=3) - bboxes = proto.RepeatedField( - proto.MESSAGE, - number=4, - message=struct.ListValue, - ) + bboxes = proto.RepeatedField(proto.MESSAGE, number=4, message=struct.ListValue,) __all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/google/cloud/aiplatform/v1beta1/schema/predict/prediction_v1beta1/types/image_segmentation.py b/google/cloud/aiplatform/v1beta1/schema/predict/prediction_v1beta1/types/image_segmentation.py index a92a3805a3..195dea6f79 100644 --- a/google/cloud/aiplatform/v1beta1/schema/predict/prediction_v1beta1/types/image_segmentation.py +++ b/google/cloud/aiplatform/v1beta1/schema/predict/prediction_v1beta1/types/image_segmentation.py @@ -20,9 +20,7 @@ __protobuf__ = proto.module( package="google.cloud.aiplatform.v1beta1.schema.predict.prediction", - manifest={ - "ImageSegmentationPredictionResult", - }, + manifest={"ImageSegmentationPredictionResult",}, ) diff --git a/google/cloud/aiplatform/v1beta1/schema/predict/prediction_v1beta1/types/tabular_classification.py b/google/cloud/aiplatform/v1beta1/schema/predict/prediction_v1beta1/types/tabular_classification.py index 759329db4b..4906ad59a5 100644 --- a/google/cloud/aiplatform/v1beta1/schema/predict/prediction_v1beta1/types/tabular_classification.py +++ b/google/cloud/aiplatform/v1beta1/schema/predict/prediction_v1beta1/types/tabular_classification.py @@ -20,9 +20,7 @@ __protobuf__ = proto.module( package="google.cloud.aiplatform.v1beta1.schema.predict.prediction", - manifest={ - "TabularClassificationPredictionResult", - }, + manifest={"TabularClassificationPredictionResult",}, ) diff --git a/google/cloud/aiplatform/v1beta1/schema/predict/prediction_v1beta1/types/tabular_regression.py b/google/cloud/aiplatform/v1beta1/schema/predict/prediction_v1beta1/types/tabular_regression.py index ed7851e3bd..71d535c1f0 100644 --- a/google/cloud/aiplatform/v1beta1/schema/predict/prediction_v1beta1/types/tabular_regression.py +++ b/google/cloud/aiplatform/v1beta1/schema/predict/prediction_v1beta1/types/tabular_regression.py @@ -20,9 +20,7 @@ __protobuf__ = proto.module( package="google.cloud.aiplatform.v1beta1.schema.predict.prediction", - manifest={ - "TabularRegressionPredictionResult", - }, + manifest={"TabularRegressionPredictionResult",}, ) diff --git a/google/cloud/aiplatform/v1beta1/schema/predict/prediction_v1beta1/types/text_extraction.py b/google/cloud/aiplatform/v1beta1/schema/predict/prediction_v1beta1/types/text_extraction.py index 5450db2ffb..e3c10b5d75 100644 --- a/google/cloud/aiplatform/v1beta1/schema/predict/prediction_v1beta1/types/text_extraction.py +++ b/google/cloud/aiplatform/v1beta1/schema/predict/prediction_v1beta1/types/text_extraction.py @@ -20,9 +20,7 @@ __protobuf__ = proto.module( package="google.cloud.aiplatform.v1beta1.schema.predict.prediction", - manifest={ - "TextExtractionPredictionResult", - }, + manifest={"TextExtractionPredictionResult",}, ) diff --git a/google/cloud/aiplatform/v1beta1/schema/predict/prediction_v1beta1/types/text_sentiment.py b/google/cloud/aiplatform/v1beta1/schema/predict/prediction_v1beta1/types/text_sentiment.py 
index fcd296366f..192e50419d 100644 --- a/google/cloud/aiplatform/v1beta1/schema/predict/prediction_v1beta1/types/text_sentiment.py +++ b/google/cloud/aiplatform/v1beta1/schema/predict/prediction_v1beta1/types/text_sentiment.py @@ -23,9 +23,7 @@ __protobuf__ = proto.module( package="google.cloud.aiplatform.v1beta1.schema.predict.prediction", - manifest={ - "TextSentimentPredictionResult", - }, + manifest={"TextSentimentPredictionResult",}, ) @@ -64,11 +62,7 @@ class Prediction(proto.Message): message=gcaspi_text_sentiment.TextSentimentPredictionInstance, ) - prediction = proto.Field( - proto.MESSAGE, - number=2, - message=Prediction, - ) + prediction = proto.Field(proto.MESSAGE, number=2, message=Prediction,) __all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/google/cloud/aiplatform/v1beta1/schema/predict/prediction_v1beta1/types/time_series_forecasting.py b/google/cloud/aiplatform/v1beta1/schema/predict/prediction_v1beta1/types/time_series_forecasting.py index eb30436beb..38bd8e3c85 100644 --- a/google/cloud/aiplatform/v1beta1/schema/predict/prediction_v1beta1/types/time_series_forecasting.py +++ b/google/cloud/aiplatform/v1beta1/schema/predict/prediction_v1beta1/types/time_series_forecasting.py @@ -20,9 +20,7 @@ __protobuf__ = proto.module( package="google.cloud.aiplatform.v1beta1.schema.predict.prediction", - manifest={ - "TimeSeriesForecastingPredictionResult", - }, + manifest={"TimeSeriesForecastingPredictionResult",}, ) diff --git a/google/cloud/aiplatform/v1beta1/schema/predict/prediction_v1beta1/types/video_action_recognition.py b/google/cloud/aiplatform/v1beta1/schema/predict/prediction_v1beta1/types/video_action_recognition.py index 8105e21a87..f76b51899b 100644 --- a/google/cloud/aiplatform/v1beta1/schema/predict/prediction_v1beta1/types/video_action_recognition.py +++ b/google/cloud/aiplatform/v1beta1/schema/predict/prediction_v1beta1/types/video_action_recognition.py @@ -24,9 +24,7 @@ __protobuf__ = proto.module( package="google.cloud.aiplatform.v1beta1.schema.predict.prediction", - manifest={ - "VideoActionRecognitionPredictionResult", - }, + manifest={"VideoActionRecognitionPredictionResult",}, ) @@ -65,22 +63,12 @@ class VideoActionRecognitionPredictionResult(proto.Message): display_name = proto.Field(proto.STRING, number=2) time_segment_start = proto.Field( - proto.MESSAGE, - number=4, - message=duration.Duration, + proto.MESSAGE, number=4, message=duration.Duration, ) - time_segment_end = proto.Field( - proto.MESSAGE, - number=5, - message=duration.Duration, - ) + time_segment_end = proto.Field(proto.MESSAGE, number=5, message=duration.Duration,) - confidence = proto.Field( - proto.MESSAGE, - number=6, - message=wrappers.FloatValue, - ) + confidence = proto.Field(proto.MESSAGE, number=6, message=wrappers.FloatValue,) __all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/google/cloud/aiplatform/v1beta1/schema/predict/prediction_v1beta1/types/video_classification.py b/google/cloud/aiplatform/v1beta1/schema/predict/prediction_v1beta1/types/video_classification.py index dbee575ef5..469023b122 100644 --- a/google/cloud/aiplatform/v1beta1/schema/predict/prediction_v1beta1/types/video_classification.py +++ b/google/cloud/aiplatform/v1beta1/schema/predict/prediction_v1beta1/types/video_classification.py @@ -24,9 +24,7 @@ __protobuf__ = proto.module( package="google.cloud.aiplatform.v1beta1.schema.predict.prediction", - manifest={ - "VideoClassificationPredictionResult", - }, + manifest={"VideoClassificationPredictionResult",}, ) @@ -81,22 +79,12 @@ class 
VideoClassificationPredictionResult(proto.Message): type_ = proto.Field(proto.STRING, number=3) time_segment_start = proto.Field( - proto.MESSAGE, - number=4, - message=duration.Duration, + proto.MESSAGE, number=4, message=duration.Duration, ) - time_segment_end = proto.Field( - proto.MESSAGE, - number=5, - message=duration.Duration, - ) + time_segment_end = proto.Field(proto.MESSAGE, number=5, message=duration.Duration,) - confidence = proto.Field( - proto.MESSAGE, - number=6, - message=wrappers.FloatValue, - ) + confidence = proto.Field(proto.MESSAGE, number=6, message=wrappers.FloatValue,) __all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/google/cloud/aiplatform/v1beta1/schema/predict/prediction_v1beta1/types/video_object_tracking.py b/google/cloud/aiplatform/v1beta1/schema/predict/prediction_v1beta1/types/video_object_tracking.py index 2a05724028..026f80a325 100644 --- a/google/cloud/aiplatform/v1beta1/schema/predict/prediction_v1beta1/types/video_object_tracking.py +++ b/google/cloud/aiplatform/v1beta1/schema/predict/prediction_v1beta1/types/video_object_tracking.py @@ -24,9 +24,7 @@ __protobuf__ = proto.module( package="google.cloud.aiplatform.v1beta1.schema.predict.prediction", - manifest={ - "VideoObjectTrackingPredictionResult", - }, + manifest={"VideoObjectTrackingPredictionResult",}, ) @@ -89,63 +87,29 @@ class Frame(proto.Message): box. """ - time_offset = proto.Field( - proto.MESSAGE, - number=1, - message=duration.Duration, - ) - - x_min = proto.Field( - proto.MESSAGE, - number=2, - message=wrappers.FloatValue, - ) - - x_max = proto.Field( - proto.MESSAGE, - number=3, - message=wrappers.FloatValue, - ) - - y_min = proto.Field( - proto.MESSAGE, - number=4, - message=wrappers.FloatValue, - ) - - y_max = proto.Field( - proto.MESSAGE, - number=5, - message=wrappers.FloatValue, - ) + time_offset = proto.Field(proto.MESSAGE, number=1, message=duration.Duration,) + + x_min = proto.Field(proto.MESSAGE, number=2, message=wrappers.FloatValue,) + + x_max = proto.Field(proto.MESSAGE, number=3, message=wrappers.FloatValue,) + + y_min = proto.Field(proto.MESSAGE, number=4, message=wrappers.FloatValue,) + + y_max = proto.Field(proto.MESSAGE, number=5, message=wrappers.FloatValue,) id = proto.Field(proto.STRING, number=1) display_name = proto.Field(proto.STRING, number=2) time_segment_start = proto.Field( - proto.MESSAGE, - number=3, - message=duration.Duration, + proto.MESSAGE, number=3, message=duration.Duration, ) - time_segment_end = proto.Field( - proto.MESSAGE, - number=4, - message=duration.Duration, - ) + time_segment_end = proto.Field(proto.MESSAGE, number=4, message=duration.Duration,) - confidence = proto.Field( - proto.MESSAGE, - number=5, - message=wrappers.FloatValue, - ) + confidence = proto.Field(proto.MESSAGE, number=5, message=wrappers.FloatValue,) - frames = proto.RepeatedField( - proto.MESSAGE, - number=6, - message=Frame, - ) + frames = proto.RepeatedField(proto.MESSAGE, number=6, message=Frame,) __all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/google/cloud/aiplatform/v1beta1/schema/trainingjob/definition_v1beta1/types/automl_forecasting.py b/google/cloud/aiplatform/v1beta1/schema/trainingjob/definition_v1beta1/types/automl_forecasting.py index 337138d774..40c549dc5f 100644 --- a/google/cloud/aiplatform/v1beta1/schema/trainingjob/definition_v1beta1/types/automl_forecasting.py +++ b/google/cloud/aiplatform/v1beta1/schema/trainingjob/definition_v1beta1/types/automl_forecasting.py @@ -44,16 +44,10 @@ class AutoMlForecasting(proto.Message): The metadata 
information. """ - inputs = proto.Field( - proto.MESSAGE, - number=1, - message="AutoMlForecastingInputs", - ) + inputs = proto.Field(proto.MESSAGE, number=1, message="AutoMlForecastingInputs",) metadata = proto.Field( - proto.MESSAGE, - number=2, - message="AutoMlForecastingMetadata", + proto.MESSAGE, number=2, message="AutoMlForecastingMetadata", ) @@ -445,9 +439,7 @@ class Period(proto.Message): time_column = proto.Field(proto.STRING, number=3) transformations = proto.RepeatedField( - proto.MESSAGE, - number=4, - message=Transformation, + proto.MESSAGE, number=4, message=Transformation, ) optimization_objective = proto.Field(proto.STRING, number=5) @@ -462,11 +454,7 @@ class Period(proto.Message): time_variant_past_and_future_columns = proto.RepeatedField(proto.STRING, number=10) - period = proto.Field( - proto.MESSAGE, - number=11, - message=Period, - ) + period = proto.Field(proto.MESSAGE, number=11, message=Period,) forecast_window_start = proto.Field(proto.INT64, number=12) diff --git a/google/cloud/aiplatform/v1beta1/schema/trainingjob/definition_v1beta1/types/automl_image_classification.py b/google/cloud/aiplatform/v1beta1/schema/trainingjob/definition_v1beta1/types/automl_image_classification.py index 57fb8fd17c..0ee0394192 100644 --- a/google/cloud/aiplatform/v1beta1/schema/trainingjob/definition_v1beta1/types/automl_image_classification.py +++ b/google/cloud/aiplatform/v1beta1/schema/trainingjob/definition_v1beta1/types/automl_image_classification.py @@ -40,15 +40,11 @@ class AutoMlImageClassification(proto.Message): """ inputs = proto.Field( - proto.MESSAGE, - number=1, - message="AutoMlImageClassificationInputs", + proto.MESSAGE, number=1, message="AutoMlImageClassificationInputs", ) metadata = proto.Field( - proto.MESSAGE, - number=2, - message="AutoMlImageClassificationMetadata", + proto.MESSAGE, number=2, message="AutoMlImageClassificationMetadata", ) @@ -105,11 +101,7 @@ class ModelType(proto.Enum): MOBILE_TF_VERSATILE_1 = 3 MOBILE_TF_HIGH_ACCURACY_1 = 4 - model_type = proto.Field( - proto.ENUM, - number=1, - enum=ModelType, - ) + model_type = proto.Field(proto.ENUM, number=1, enum=ModelType,) base_model_id = proto.Field(proto.STRING, number=2) @@ -144,9 +136,7 @@ class SuccessfulStopReason(proto.Enum): cost_milli_node_hours = proto.Field(proto.INT64, number=1) successful_stop_reason = proto.Field( - proto.ENUM, - number=2, - enum=SuccessfulStopReason, + proto.ENUM, number=2, enum=SuccessfulStopReason, ) diff --git a/google/cloud/aiplatform/v1beta1/schema/trainingjob/definition_v1beta1/types/automl_image_object_detection.py b/google/cloud/aiplatform/v1beta1/schema/trainingjob/definition_v1beta1/types/automl_image_object_detection.py index 420e4a4a31..3fb9d3ae1d 100644 --- a/google/cloud/aiplatform/v1beta1/schema/trainingjob/definition_v1beta1/types/automl_image_object_detection.py +++ b/google/cloud/aiplatform/v1beta1/schema/trainingjob/definition_v1beta1/types/automl_image_object_detection.py @@ -40,15 +40,11 @@ class AutoMlImageObjectDetection(proto.Message): """ inputs = proto.Field( - proto.MESSAGE, - number=1, - message="AutoMlImageObjectDetectionInputs", + proto.MESSAGE, number=1, message="AutoMlImageObjectDetectionInputs", ) metadata = proto.Field( - proto.MESSAGE, - number=2, - message="AutoMlImageObjectDetectionMetadata", + proto.MESSAGE, number=2, message="AutoMlImageObjectDetectionMetadata", ) @@ -94,11 +90,7 @@ class ModelType(proto.Enum): MOBILE_TF_VERSATILE_1 = 4 MOBILE_TF_HIGH_ACCURACY_1 = 5 - model_type = proto.Field( - proto.ENUM, - number=1, - 
enum=ModelType, - ) + model_type = proto.Field(proto.ENUM, number=1, enum=ModelType,) budget_milli_node_hours = proto.Field(proto.INT64, number=2) @@ -129,9 +121,7 @@ class SuccessfulStopReason(proto.Enum): cost_milli_node_hours = proto.Field(proto.INT64, number=1) successful_stop_reason = proto.Field( - proto.ENUM, - number=2, - enum=SuccessfulStopReason, + proto.ENUM, number=2, enum=SuccessfulStopReason, ) diff --git a/google/cloud/aiplatform/v1beta1/schema/trainingjob/definition_v1beta1/types/automl_image_segmentation.py b/google/cloud/aiplatform/v1beta1/schema/trainingjob/definition_v1beta1/types/automl_image_segmentation.py index c767f4272b..0fa3788b11 100644 --- a/google/cloud/aiplatform/v1beta1/schema/trainingjob/definition_v1beta1/types/automl_image_segmentation.py +++ b/google/cloud/aiplatform/v1beta1/schema/trainingjob/definition_v1beta1/types/automl_image_segmentation.py @@ -40,15 +40,11 @@ class AutoMlImageSegmentation(proto.Message): """ inputs = proto.Field( - proto.MESSAGE, - number=1, - message="AutoMlImageSegmentationInputs", + proto.MESSAGE, number=1, message="AutoMlImageSegmentationInputs", ) metadata = proto.Field( - proto.MESSAGE, - number=2, - message="AutoMlImageSegmentationMetadata", + proto.MESSAGE, number=2, message="AutoMlImageSegmentationMetadata", ) @@ -87,11 +83,7 @@ class ModelType(proto.Enum): CLOUD_HIGH_ACCURACY_1 = 1 CLOUD_LOW_ACCURACY_1 = 2 - model_type = proto.Field( - proto.ENUM, - number=1, - enum=ModelType, - ) + model_type = proto.Field(proto.ENUM, number=1, enum=ModelType,) budget_milli_node_hours = proto.Field(proto.INT64, number=2) @@ -122,9 +114,7 @@ class SuccessfulStopReason(proto.Enum): cost_milli_node_hours = proto.Field(proto.INT64, number=1) successful_stop_reason = proto.Field( - proto.ENUM, - number=2, - enum=SuccessfulStopReason, + proto.ENUM, number=2, enum=SuccessfulStopReason, ) diff --git a/google/cloud/aiplatform/v1beta1/schema/trainingjob/definition_v1beta1/types/automl_tables.py b/google/cloud/aiplatform/v1beta1/schema/trainingjob/definition_v1beta1/types/automl_tables.py index 362b3613fd..55d620b32e 100644 --- a/google/cloud/aiplatform/v1beta1/schema/trainingjob/definition_v1beta1/types/automl_tables.py +++ b/google/cloud/aiplatform/v1beta1/schema/trainingjob/definition_v1beta1/types/automl_tables.py @@ -25,11 +25,7 @@ __protobuf__ = proto.module( package="google.cloud.aiplatform.v1beta1.schema.trainingjob.definition", - manifest={ - "AutoMlTables", - "AutoMlTablesInputs", - "AutoMlTablesMetadata", - }, + manifest={"AutoMlTables", "AutoMlTablesInputs", "AutoMlTablesMetadata",}, ) @@ -43,17 +39,9 @@ class AutoMlTables(proto.Message): The metadata information. 
""" - inputs = proto.Field( - proto.MESSAGE, - number=1, - message="AutoMlTablesInputs", - ) + inputs = proto.Field(proto.MESSAGE, number=1, message="AutoMlTablesInputs",) - metadata = proto.Field( - proto.MESSAGE, - number=2, - message="AutoMlTablesMetadata", - ) + metadata = proto.Field(proto.MESSAGE, number=2, message="AutoMlTablesMetadata",) class AutoMlTablesInputs(proto.Message): @@ -424,9 +412,7 @@ class TextArrayTransformation(proto.Message): target_column = proto.Field(proto.STRING, number=2) transformations = proto.RepeatedField( - proto.MESSAGE, - number=3, - message=Transformation, + proto.MESSAGE, number=3, message=Transformation, ) optimization_objective = proto.Field(proto.STRING, number=4) diff --git a/google/cloud/aiplatform/v1beta1/schema/trainingjob/definition_v1beta1/types/automl_text_classification.py b/google/cloud/aiplatform/v1beta1/schema/trainingjob/definition_v1beta1/types/automl_text_classification.py index 8b7c29d198..ca75734600 100644 --- a/google/cloud/aiplatform/v1beta1/schema/trainingjob/definition_v1beta1/types/automl_text_classification.py +++ b/google/cloud/aiplatform/v1beta1/schema/trainingjob/definition_v1beta1/types/automl_text_classification.py @@ -20,10 +20,7 @@ __protobuf__ = proto.module( package="google.cloud.aiplatform.v1beta1.schema.trainingjob.definition", - manifest={ - "AutoMlTextClassification", - "AutoMlTextClassificationInputs", - }, + manifest={"AutoMlTextClassification", "AutoMlTextClassificationInputs",}, ) @@ -37,9 +34,7 @@ class AutoMlTextClassification(proto.Message): """ inputs = proto.Field( - proto.MESSAGE, - number=1, - message="AutoMlTextClassificationInputs", + proto.MESSAGE, number=1, message="AutoMlTextClassificationInputs", ) diff --git a/google/cloud/aiplatform/v1beta1/schema/trainingjob/definition_v1beta1/types/automl_text_extraction.py b/google/cloud/aiplatform/v1beta1/schema/trainingjob/definition_v1beta1/types/automl_text_extraction.py index c1e44e4630..336509af22 100644 --- a/google/cloud/aiplatform/v1beta1/schema/trainingjob/definition_v1beta1/types/automl_text_extraction.py +++ b/google/cloud/aiplatform/v1beta1/schema/trainingjob/definition_v1beta1/types/automl_text_extraction.py @@ -20,10 +20,7 @@ __protobuf__ = proto.module( package="google.cloud.aiplatform.v1beta1.schema.trainingjob.definition", - manifest={ - "AutoMlTextExtraction", - "AutoMlTextExtractionInputs", - }, + manifest={"AutoMlTextExtraction", "AutoMlTextExtractionInputs",}, ) @@ -36,11 +33,7 @@ class AutoMlTextExtraction(proto.Message): The input parameters of this TrainingJob. 
""" - inputs = proto.Field( - proto.MESSAGE, - number=1, - message="AutoMlTextExtractionInputs", - ) + inputs = proto.Field(proto.MESSAGE, number=1, message="AutoMlTextExtractionInputs",) class AutoMlTextExtractionInputs(proto.Message): diff --git a/google/cloud/aiplatform/v1beta1/schema/trainingjob/definition_v1beta1/types/automl_text_sentiment.py b/google/cloud/aiplatform/v1beta1/schema/trainingjob/definition_v1beta1/types/automl_text_sentiment.py index d1b936a361..d5de97e2b2 100644 --- a/google/cloud/aiplatform/v1beta1/schema/trainingjob/definition_v1beta1/types/automl_text_sentiment.py +++ b/google/cloud/aiplatform/v1beta1/schema/trainingjob/definition_v1beta1/types/automl_text_sentiment.py @@ -20,10 +20,7 @@ __protobuf__ = proto.module( package="google.cloud.aiplatform.v1beta1.schema.trainingjob.definition", - manifest={ - "AutoMlTextSentiment", - "AutoMlTextSentimentInputs", - }, + manifest={"AutoMlTextSentiment", "AutoMlTextSentimentInputs",}, ) @@ -36,11 +33,7 @@ class AutoMlTextSentiment(proto.Message): The input parameters of this TrainingJob. """ - inputs = proto.Field( - proto.MESSAGE, - number=1, - message="AutoMlTextSentimentInputs", - ) + inputs = proto.Field(proto.MESSAGE, number=1, message="AutoMlTextSentimentInputs",) class AutoMlTextSentimentInputs(proto.Message): diff --git a/google/cloud/aiplatform/v1beta1/schema/trainingjob/definition_v1beta1/types/automl_video_action_recognition.py b/google/cloud/aiplatform/v1beta1/schema/trainingjob/definition_v1beta1/types/automl_video_action_recognition.py index 0c5ae5f629..d6969d93c6 100644 --- a/google/cloud/aiplatform/v1beta1/schema/trainingjob/definition_v1beta1/types/automl_video_action_recognition.py +++ b/google/cloud/aiplatform/v1beta1/schema/trainingjob/definition_v1beta1/types/automl_video_action_recognition.py @@ -20,10 +20,7 @@ __protobuf__ = proto.module( package="google.cloud.aiplatform.v1beta1.schema.trainingjob.definition", - manifest={ - "AutoMlVideoActionRecognition", - "AutoMlVideoActionRecognitionInputs", - }, + manifest={"AutoMlVideoActionRecognition", "AutoMlVideoActionRecognitionInputs",}, ) @@ -37,9 +34,7 @@ class AutoMlVideoActionRecognition(proto.Message): """ inputs = proto.Field( - proto.MESSAGE, - number=1, - message="AutoMlVideoActionRecognitionInputs", + proto.MESSAGE, number=1, message="AutoMlVideoActionRecognitionInputs", ) @@ -57,11 +52,7 @@ class ModelType(proto.Enum): CLOUD = 1 MOBILE_VERSATILE_1 = 2 - model_type = proto.Field( - proto.ENUM, - number=1, - enum=ModelType, - ) + model_type = proto.Field(proto.ENUM, number=1, enum=ModelType,) __all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/google/cloud/aiplatform/v1beta1/schema/trainingjob/definition_v1beta1/types/automl_video_classification.py b/google/cloud/aiplatform/v1beta1/schema/trainingjob/definition_v1beta1/types/automl_video_classification.py index 4e06caf015..3164544d47 100644 --- a/google/cloud/aiplatform/v1beta1/schema/trainingjob/definition_v1beta1/types/automl_video_classification.py +++ b/google/cloud/aiplatform/v1beta1/schema/trainingjob/definition_v1beta1/types/automl_video_classification.py @@ -20,10 +20,7 @@ __protobuf__ = proto.module( package="google.cloud.aiplatform.v1beta1.schema.trainingjob.definition", - manifest={ - "AutoMlVideoClassification", - "AutoMlVideoClassificationInputs", - }, + manifest={"AutoMlVideoClassification", "AutoMlVideoClassificationInputs",}, ) @@ -37,9 +34,7 @@ class AutoMlVideoClassification(proto.Message): """ inputs = proto.Field( - proto.MESSAGE, - number=1, - 
message="AutoMlVideoClassificationInputs", + proto.MESSAGE, number=1, message="AutoMlVideoClassificationInputs", ) @@ -57,11 +52,7 @@ class ModelType(proto.Enum): CLOUD = 1 MOBILE_VERSATILE_1 = 2 - model_type = proto.Field( - proto.ENUM, - number=1, - enum=ModelType, - ) + model_type = proto.Field(proto.ENUM, number=1, enum=ModelType,) __all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/google/cloud/aiplatform/v1beta1/schema/trainingjob/definition_v1beta1/types/automl_video_object_tracking.py b/google/cloud/aiplatform/v1beta1/schema/trainingjob/definition_v1beta1/types/automl_video_object_tracking.py index e351db59d3..0fd8c7ec7a 100644 --- a/google/cloud/aiplatform/v1beta1/schema/trainingjob/definition_v1beta1/types/automl_video_object_tracking.py +++ b/google/cloud/aiplatform/v1beta1/schema/trainingjob/definition_v1beta1/types/automl_video_object_tracking.py @@ -20,10 +20,7 @@ __protobuf__ = proto.module( package="google.cloud.aiplatform.v1beta1.schema.trainingjob.definition", - manifest={ - "AutoMlVideoObjectTracking", - "AutoMlVideoObjectTrackingInputs", - }, + manifest={"AutoMlVideoObjectTracking", "AutoMlVideoObjectTrackingInputs",}, ) @@ -37,9 +34,7 @@ class AutoMlVideoObjectTracking(proto.Message): """ inputs = proto.Field( - proto.MESSAGE, - number=1, - message="AutoMlVideoObjectTrackingInputs", + proto.MESSAGE, number=1, message="AutoMlVideoObjectTrackingInputs", ) @@ -61,11 +56,7 @@ class ModelType(proto.Enum): MOBILE_JETSON_VERSATILE_1 = 5 MOBILE_JETSON_LOW_LATENCY_1 = 6 - model_type = proto.Field( - proto.ENUM, - number=1, - enum=ModelType, - ) + model_type = proto.Field(proto.ENUM, number=1, enum=ModelType,) __all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/google/cloud/aiplatform/v1beta1/schema/trainingjob/definition_v1beta1/types/export_evaluated_data_items_config.py b/google/cloud/aiplatform/v1beta1/schema/trainingjob/definition_v1beta1/types/export_evaluated_data_items_config.py index 4d8070c737..29bc547adf 100644 --- a/google/cloud/aiplatform/v1beta1/schema/trainingjob/definition_v1beta1/types/export_evaluated_data_items_config.py +++ b/google/cloud/aiplatform/v1beta1/schema/trainingjob/definition_v1beta1/types/export_evaluated_data_items_config.py @@ -20,9 +20,7 @@ __protobuf__ = proto.module( package="google.cloud.aiplatform.v1beta1.schema.trainingjob.definition", - manifest={ - "ExportEvaluatedDataItemsConfig", - }, + manifest={"ExportEvaluatedDataItemsConfig",}, ) diff --git a/google/cloud/aiplatform_v1beta1/services/dataset_service/async_client.py b/google/cloud/aiplatform_v1beta1/services/dataset_service/async_client.py index d0139b3003..1927709f30 100644 --- a/google/cloud/aiplatform_v1beta1/services/dataset_service/async_client.py +++ b/google/cloud/aiplatform_v1beta1/services/dataset_service/async_client.py @@ -239,12 +239,7 @@ async def create_dataset( ) # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) # Wrap the response in an operation future. response = operation_async.from_gapic( @@ -324,12 +319,7 @@ async def get_dataset( ) # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) # Done; return the response. return response @@ -419,12 +409,7 @@ async def update_dataset( ) # Send the request. 
-        response = await rpc(
-            request,
-            retry=retry,
-            timeout=timeout,
-            metadata=metadata,
-        )
+        response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,)

         # Done; return the response.
         return response
@@ -499,20 +484,12 @@ async def list_datasets(
         )

         # Send the request.
-        response = await rpc(
-            request,
-            retry=retry,
-            timeout=timeout,
-            metadata=metadata,
-        )
+        response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,)

         # This method is paged; wrap the response in a pager, which provides
         # an `__aiter__` convenience method.
         response = pagers.ListDatasetsAsyncPager(
-            method=rpc,
-            request=request,
-            response=response,
-            metadata=metadata,
+            method=rpc, request=request, response=response, metadata=metadata,
         )

         # Done; return the response.
@@ -601,12 +578,7 @@ async def delete_dataset(
         )

         # Send the request.
-        response = await rpc(
-            request,
-            retry=retry,
-            timeout=timeout,
-            metadata=metadata,
-        )
+        response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,)

         # Wrap the response in an operation future.
         response = operation_async.from_gapic(
@@ -701,12 +673,7 @@ async def import_data(
         )

         # Send the request.
-        response = await rpc(
-            request,
-            retry=retry,
-            timeout=timeout,
-            metadata=metadata,
-        )
+        response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,)

         # Wrap the response in an operation future.
         response = operation_async.from_gapic(
@@ -799,12 +766,7 @@ async def export_data(
         )

         # Send the request.
-        response = await rpc(
-            request,
-            retry=retry,
-            timeout=timeout,
-            metadata=metadata,
-        )
+        response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,)

         # Wrap the response in an operation future.
         response = operation_async.from_gapic(
@@ -888,20 +850,12 @@ async def list_data_items(
         )

         # Send the request.
-        response = await rpc(
-            request,
-            retry=retry,
-            timeout=timeout,
-            metadata=metadata,
-        )
+        response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,)

         # This method is paged; wrap the response in a pager, which provides
         # an `__aiter__` convenience method.
         response = pagers.ListDataItemsAsyncPager(
-            method=rpc,
-            request=request,
-            response=response,
-            metadata=metadata,
+            method=rpc, request=request, response=response, metadata=metadata,
         )

         # Done; return the response.
@@ -976,12 +930,7 @@ async def get_annotation_spec(
         )

         # Send the request.
-        response = await rpc(
-            request,
-            retry=retry,
-            timeout=timeout,
-            metadata=metadata,
-        )
+        response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,)

         # Done; return the response.
         return response
@@ -1058,20 +1007,12 @@ async def list_annotations(
         )

         # Send the request.
-        response = await rpc(
-            request,
-            retry=retry,
-            timeout=timeout,
-            metadata=metadata,
-        )
+        response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,)

         # This method is paged; wrap the response in a pager, which provides
         # an `__aiter__` convenience method.
         response = pagers.ListAnnotationsAsyncPager(
-            method=rpc,
-            request=request,
-            response=response,
-            metadata=metadata,
+            method=rpc, request=request, response=response, metadata=metadata,
         )

         # Done; return the response.
diff --git a/google/cloud/aiplatform_v1beta1/services/dataset_service/client.py b/google/cloud/aiplatform_v1beta1/services/dataset_service/client.py
index b97aa5385c..1e63153291 100644
--- a/google/cloud/aiplatform_v1beta1/services/dataset_service/client.py
+++ b/google/cloud/aiplatform_v1beta1/services/dataset_service/client.py
@@ -66,10 +66,7 @@ class DatasetServiceClientMeta(type):
     _transport_registry["grpc"] = DatasetServiceGrpcTransport
     _transport_registry["grpc_asyncio"] = DatasetServiceGrpcAsyncIOTransport

-    def get_transport_class(
-        cls,
-        label: str = None,
-    ) -> Type[DatasetServiceTransport]:
+    def get_transport_class(cls, label: str = None,) -> Type[DatasetServiceTransport]:
        """Return an appropriate transport class.

        Args:
@@ -156,11 +153,7 @@ def transport(self) -> DatasetServiceTransport:

     @staticmethod
     def annotation_path(
-        project: str,
-        location: str,
-        dataset: str,
-        data_item: str,
-        annotation: str,
+        project: str, location: str, dataset: str, data_item: str, annotation: str,
     ) -> str:
         """Return a fully-qualified annotation string."""
         return "projects/{project}/locations/{location}/datasets/{dataset}/dataItems/{data_item}/annotations/{annotation}".format(
@@ -182,10 +175,7 @@ def parse_annotation_path(path: str) -> Dict[str, str]:

     @staticmethod
     def annotation_spec_path(
-        project: str,
-        location: str,
-        dataset: str,
-        annotation_spec: str,
+        project: str, location: str, dataset: str, annotation_spec: str,
     ) -> str:
         """Return a fully-qualified annotation_spec string."""
         return "projects/{project}/locations/{location}/datasets/{dataset}/annotationSpecs/{annotation_spec}".format(
@@ -206,17 +196,11 @@ def parse_annotation_spec_path(path: str) -> Dict[str, str]:

     @staticmethod
     def data_item_path(
-        project: str,
-        location: str,
-        dataset: str,
-        data_item: str,
+        project: str, location: str, dataset: str, data_item: str,
     ) -> str:
         """Return a fully-qualified data_item string."""
         return "projects/{project}/locations/{location}/datasets/{dataset}/dataItems/{data_item}".format(
-            project=project,
-            location=location,
-            dataset=dataset,
-            data_item=data_item,
+            project=project, location=location, dataset=dataset, data_item=data_item,
         )

     @staticmethod
@@ -229,16 +213,10 @@ def parse_data_item_path(path: str) -> Dict[str, str]:
         return m.groupdict() if m else {}

     @staticmethod
-    def dataset_path(
-        project: str,
-        location: str,
-        dataset: str,
-    ) -> str:
+    def dataset_path(project: str, location: str, dataset: str,) -> str:
         """Return a fully-qualified dataset string."""
         return "projects/{project}/locations/{location}/datasets/{dataset}".format(
-            project=project,
-            location=location,
-            dataset=dataset,
+            project=project, location=location, dataset=dataset,
         )

     @staticmethod
@@ -251,9 +229,7 @@ def parse_dataset_path(path: str) -> Dict[str, str]:
         return m.groupdict() if m else {}

     @staticmethod
-    def common_billing_account_path(
-        billing_account: str,
-    ) -> str:
+    def common_billing_account_path(billing_account: str,) -> str:
         """Return a fully-qualified billing_account string."""
         return "billingAccounts/{billing_account}".format(
             billing_account=billing_account,
@@ -266,13 +242,9 @@ def parse_common_billing_account_path(path: str) -> Dict[str, str]:
         return m.groupdict() if m else {}

     @staticmethod
-    def common_folder_path(
-        folder: str,
-    ) -> str:
+    def common_folder_path(folder: str,) -> str:
         """Return a fully-qualified folder string."""
-        return "folders/{folder}".format(
-            folder=folder,
-        )
+        return "folders/{folder}".format(folder=folder,)

     @staticmethod
     def parse_common_folder_path(path: str) -> Dict[str, str]:
@@ -281,13 +253,9 @@ def parse_common_folder_path(path: str) -> Dict[str, str]:
         return m.groupdict() if m else {}

     @staticmethod
-    def common_organization_path(
-        organization: str,
-    ) -> str:
+    def common_organization_path(organization: str,) -> str:
         """Return a fully-qualified organization string."""
-        return "organizations/{organization}".format(
-            organization=organization,
-        )
+        return "organizations/{organization}".format(organization=organization,)

     @staticmethod
     def parse_common_organization_path(path: str) -> Dict[str, str]:
@@ -296,13 +264,9 @@ def parse_common_organization_path(path: str) -> Dict[str, str]:
         return m.groupdict() if m else {}

     @staticmethod
-    def common_project_path(
-        project: str,
-    ) -> str:
+    def common_project_path(project: str,) -> str:
         """Return a fully-qualified project string."""
-        return "projects/{project}".format(
-            project=project,
-        )
+        return "projects/{project}".format(project=project,)

     @staticmethod
     def parse_common_project_path(path: str) -> Dict[str, str]:
@@ -311,14 +275,10 @@ def parse_common_project_path(path: str) -> Dict[str, str]:
         return m.groupdict() if m else {}

     @staticmethod
-    def common_location_path(
-        project: str,
-        location: str,
-    ) -> str:
+    def common_location_path(project: str, location: str,) -> str:
         """Return a fully-qualified location string."""
         return "projects/{project}/locations/{location}".format(
-            project=project,
-            location=location,
+            project=project, location=location,
         )

     @staticmethod
@@ -524,12 +484,7 @@ def create_dataset(
         )

         # Send the request.
-        response = rpc(
-            request,
-            retry=retry,
-            timeout=timeout,
-            metadata=metadata,
-        )
+        response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,)

         # Wrap the response in an operation future.
         response = ga_operation.from_gapic(
@@ -610,12 +565,7 @@ def get_dataset(
         )

         # Send the request.
-        response = rpc(
-            request,
-            retry=retry,
-            timeout=timeout,
-            metadata=metadata,
-        )
+        response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,)

         # Done; return the response.
         return response
@@ -706,12 +656,7 @@ def update_dataset(
         )

         # Send the request.
-        response = rpc(
-            request,
-            retry=retry,
-            timeout=timeout,
-            metadata=metadata,
-        )
+        response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,)

         # Done; return the response.
         return response
@@ -787,20 +732,12 @@ def list_datasets(
         )

         # Send the request.
-        response = rpc(
-            request,
-            retry=retry,
-            timeout=timeout,
-            metadata=metadata,
-        )
+        response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,)

         # This method is paged; wrap the response in a pager, which provides
         # an `__iter__` convenience method.
         response = pagers.ListDatasetsPager(
-            method=rpc,
-            request=request,
-            response=response,
-            metadata=metadata,
+            method=rpc, request=request, response=response, metadata=metadata,
         )

         # Done; return the response.
@@ -890,12 +827,7 @@ def delete_dataset(
         )

         # Send the request.
-        response = rpc(
-            request,
-            retry=retry,
-            timeout=timeout,
-            metadata=metadata,
-        )
+        response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,)

         # Wrap the response in an operation future.
         response = ga_operation.from_gapic(
@@ -991,12 +923,7 @@ def import_data(
         )

         # Send the request.
-        response = rpc(
-            request,
-            retry=retry,
-            timeout=timeout,
-            metadata=metadata,
-        )
+        response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,)

         # Wrap the response in an operation future.
         response = ga_operation.from_gapic(
@@ -1090,12 +1017,7 @@ def export_data(
         )

         # Send the request.
-        response = rpc(
-            request,
-            retry=retry,
-            timeout=timeout,
-            metadata=metadata,
-        )
+        response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,)

         # Wrap the response in an operation future.
         response = ga_operation.from_gapic(
@@ -1180,20 +1102,12 @@ def list_data_items(
         )

         # Send the request.
-        response = rpc(
-            request,
-            retry=retry,
-            timeout=timeout,
-            metadata=metadata,
-        )
+        response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,)

         # This method is paged; wrap the response in a pager, which provides
         # an `__iter__` convenience method.
         response = pagers.ListDataItemsPager(
-            method=rpc,
-            request=request,
-            response=response,
-            metadata=metadata,
+            method=rpc, request=request, response=response, metadata=metadata,
         )

         # Done; return the response.
@@ -1269,12 +1183,7 @@ def get_annotation_spec(
         )

         # Send the request.
-        response = rpc(
-            request,
-            retry=retry,
-            timeout=timeout,
-            metadata=metadata,
-        )
+        response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,)

         # Done; return the response.
         return response
@@ -1352,20 +1261,12 @@ def list_annotations(
         )

         # Send the request.
-        response = rpc(
-            request,
-            retry=retry,
-            timeout=timeout,
-            metadata=metadata,
-        )
+        response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,)

         # This method is paged; wrap the response in a pager, which provides
         # an `__iter__` convenience method.
         response = pagers.ListAnnotationsPager(
-            method=rpc,
-            request=request,
-            response=response,
-            metadata=metadata,
+            method=rpc, request=request, response=response, metadata=metadata,
         )

         # Done; return the response.
diff --git a/google/cloud/aiplatform_v1beta1/services/dataset_service/transports/base.py b/google/cloud/aiplatform_v1beta1/services/dataset_service/transports/base.py
index 6d7a5dc0c3..583e9864cd 100644
--- a/google/cloud/aiplatform_v1beta1/services/dataset_service/transports/base.py
+++ b/google/cloud/aiplatform_v1beta1/services/dataset_service/transports/base.py
@@ -112,54 +112,34 @@ def _prep_wrapped_messages(self, client_info):
         # Precompute the wrapped methods.
         self._wrapped_methods = {
             self.create_dataset: gapic_v1.method.wrap_method(
-                self.create_dataset,
-                default_timeout=5.0,
-                client_info=client_info,
+                self.create_dataset, default_timeout=5.0, client_info=client_info,
             ),
             self.get_dataset: gapic_v1.method.wrap_method(
-                self.get_dataset,
-                default_timeout=5.0,
-                client_info=client_info,
+                self.get_dataset, default_timeout=5.0, client_info=client_info,
             ),
             self.update_dataset: gapic_v1.method.wrap_method(
-                self.update_dataset,
-                default_timeout=5.0,
-                client_info=client_info,
+                self.update_dataset, default_timeout=5.0, client_info=client_info,
             ),
             self.list_datasets: gapic_v1.method.wrap_method(
-                self.list_datasets,
-                default_timeout=5.0,
-                client_info=client_info,
+                self.list_datasets, default_timeout=5.0, client_info=client_info,
             ),
             self.delete_dataset: gapic_v1.method.wrap_method(
-                self.delete_dataset,
-                default_timeout=5.0,
-                client_info=client_info,
+                self.delete_dataset, default_timeout=5.0, client_info=client_info,
             ),
             self.import_data: gapic_v1.method.wrap_method(
-                self.import_data,
-                default_timeout=5.0,
-                client_info=client_info,
+                self.import_data, default_timeout=5.0, client_info=client_info,
             ),
             self.export_data: gapic_v1.method.wrap_method(
-                self.export_data,
-                default_timeout=5.0,
-                client_info=client_info,
+                self.export_data, default_timeout=5.0, client_info=client_info,
             ),
             self.list_data_items: gapic_v1.method.wrap_method(
-                self.list_data_items,
-                default_timeout=5.0,
-                client_info=client_info,
+                self.list_data_items, default_timeout=5.0, client_info=client_info,
             ),
             self.get_annotation_spec: gapic_v1.method.wrap_method(
-                self.get_annotation_spec,
-                default_timeout=5.0,
-                client_info=client_info,
+                self.get_annotation_spec, default_timeout=5.0, client_info=client_info,
             ),
             self.list_annotations: gapic_v1.method.wrap_method(
-                self.list_annotations,
-                default_timeout=5.0,
-                client_info=client_info,
+                self.list_annotations, default_timeout=5.0, client_info=client_info,
             ),
         }
diff --git a/google/cloud/aiplatform_v1beta1/services/endpoint_service/async_client.py b/google/cloud/aiplatform_v1beta1/services/endpoint_service/async_client.py
index 3afd01ea0c..9c6af3bd16 100644
--- a/google/cloud/aiplatform_v1beta1/services/endpoint_service/async_client.py
+++ b/google/cloud/aiplatform_v1beta1/services/endpoint_service/async_client.py
@@ -230,12 +230,7 @@ async def create_endpoint(
         )

         # Send the request.
-        response = await rpc(
-            request,
-            retry=retry,
-            timeout=timeout,
-            metadata=metadata,
-        )
+        response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,)

         # Wrap the response in an operation future.
         response = operation_async.from_gapic(
@@ -316,12 +311,7 @@ async def get_endpoint(
         )

         # Send the request.
-        response = await rpc(
-            request,
-            retry=retry,
-            timeout=timeout,
-            metadata=metadata,
-        )
+        response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,)

         # Done; return the response.
         return response
@@ -397,20 +387,12 @@ async def list_endpoints(
         )

         # Send the request.
-        response = await rpc(
-            request,
-            retry=retry,
-            timeout=timeout,
-            metadata=metadata,
-        )
+        response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,)

         # This method is paged; wrap the response in a pager, which provides
         # an `__aiter__` convenience method.
         response = pagers.ListEndpointsAsyncPager(
-            method=rpc,
-            request=request,
-            response=response,
-            metadata=metadata,
+            method=rpc, request=request, response=response, metadata=metadata,
         )

         # Done; return the response.
@@ -495,12 +477,7 @@ async def update_endpoint(
         )

         # Send the request.
-        response = await rpc(
-            request,
-            retry=retry,
-            timeout=timeout,
-            metadata=metadata,
-        )
+        response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,)

         # Done; return the response.
         return response
@@ -588,12 +565,7 @@ async def delete_endpoint(
         )

         # Send the request.
-        response = await rpc(
-            request,
-            retry=retry,
-            timeout=timeout,
-            metadata=metadata,
-        )
+        response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,)

         # Wrap the response in an operation future.
         response = operation_async.from_gapic(
@@ -717,12 +689,7 @@ async def deploy_model(
         )

         # Send the request.
-        response = await rpc(
-            request,
-            retry=retry,
-            timeout=timeout,
-            metadata=metadata,
-        )
+        response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,)

         # Wrap the response in an operation future.
         response = operation_async.from_gapic(
@@ -837,12 +804,7 @@ async def undeploy_model(
         )

         # Send the request.
-        response = await rpc(
-            request,
-            retry=retry,
-            timeout=timeout,
-            metadata=metadata,
-        )
+        response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,)

         # Wrap the response in an operation future.
         response = operation_async.from_gapic(
diff --git a/google/cloud/aiplatform_v1beta1/services/endpoint_service/client.py b/google/cloud/aiplatform_v1beta1/services/endpoint_service/client.py
index 28a8f6ab78..5ea003b827 100644
--- a/google/cloud/aiplatform_v1beta1/services/endpoint_service/client.py
+++ b/google/cloud/aiplatform_v1beta1/services/endpoint_service/client.py
@@ -62,10 +62,7 @@ class EndpointServiceClientMeta(type):
     _transport_registry["grpc"] = EndpointServiceGrpcTransport
     _transport_registry["grpc_asyncio"] = EndpointServiceGrpcAsyncIOTransport

-    def get_transport_class(
-        cls,
-        label: str = None,
-    ) -> Type[EndpointServiceTransport]:
+    def get_transport_class(cls, label: str = None,) -> Type[EndpointServiceTransport]:
        """Return an appropriate transport class.
        Args:
@@ -151,16 +148,10 @@ def transport(self) -> EndpointServiceTransport:
         return self._transport

     @staticmethod
-    def endpoint_path(
-        project: str,
-        location: str,
-        endpoint: str,
-    ) -> str:
+    def endpoint_path(project: str, location: str, endpoint: str,) -> str:
         """Return a fully-qualified endpoint string."""
         return "projects/{project}/locations/{location}/endpoints/{endpoint}".format(
-            project=project,
-            location=location,
-            endpoint=endpoint,
+            project=project, location=location, endpoint=endpoint,
         )

     @staticmethod
@@ -173,16 +164,10 @@ def parse_endpoint_path(path: str) -> Dict[str, str]:
         return m.groupdict() if m else {}

     @staticmethod
-    def model_path(
-        project: str,
-        location: str,
-        model: str,
-    ) -> str:
+    def model_path(project: str, location: str, model: str,) -> str:
         """Return a fully-qualified model string."""
         return "projects/{project}/locations/{location}/models/{model}".format(
-            project=project,
-            location=location,
-            model=model,
+            project=project, location=location, model=model,
         )

     @staticmethod
@@ -195,9 +180,7 @@ def parse_model_path(path: str) -> Dict[str, str]:
         return m.groupdict() if m else {}

     @staticmethod
-    def common_billing_account_path(
-        billing_account: str,
-    ) -> str:
+    def common_billing_account_path(billing_account: str,) -> str:
         """Return a fully-qualified billing_account string."""
         return "billingAccounts/{billing_account}".format(
             billing_account=billing_account,
@@ -210,13 +193,9 @@ def parse_common_billing_account_path(path: str) -> Dict[str, str]:
         return m.groupdict() if m else {}

     @staticmethod
-    def common_folder_path(
-        folder: str,
-    ) -> str:
+    def common_folder_path(folder: str,) -> str:
         """Return a fully-qualified folder string."""
-        return "folders/{folder}".format(
-            folder=folder,
-        )
+        return "folders/{folder}".format(folder=folder,)

     @staticmethod
     def parse_common_folder_path(path: str) -> Dict[str, str]:
@@ -225,13 +204,9 @@ def parse_common_folder_path(path: str) -> Dict[str, str]:
         return m.groupdict() if m else {}

     @staticmethod
-    def common_organization_path(
-        organization: str,
-    ) -> str:
+    def common_organization_path(organization: str,) -> str:
         """Return a fully-qualified organization string."""
-        return "organizations/{organization}".format(
-            organization=organization,
-        )
+        return "organizations/{organization}".format(organization=organization,)

     @staticmethod
     def parse_common_organization_path(path: str) -> Dict[str, str]:
@@ -240,13 +215,9 @@ def parse_common_organization_path(path: str) -> Dict[str, str]:
         return m.groupdict() if m else {}

     @staticmethod
-    def common_project_path(
-        project: str,
-    ) -> str:
+    def common_project_path(project: str,) -> str:
         """Return a fully-qualified project string."""
-        return "projects/{project}".format(
-            project=project,
-        )
+        return "projects/{project}".format(project=project,)

     @staticmethod
     def parse_common_project_path(path: str) -> Dict[str, str]:
@@ -255,14 +226,10 @@ def parse_common_project_path(path: str) -> Dict[str, str]:
         return m.groupdict() if m else {}

     @staticmethod
-    def common_location_path(
-        project: str,
-        location: str,
-    ) -> str:
+    def common_location_path(project: str, location: str,) -> str:
         """Return a fully-qualified location string."""
         return "projects/{project}/locations/{location}".format(
-            project=project,
-            location=location,
+            project=project, location=location,
         )

     @staticmethod
@@ -469,12 +436,7 @@ def create_endpoint(
         )

         # Send the request.
-        response = rpc(
-            request,
-            retry=retry,
-            timeout=timeout,
-            metadata=metadata,
-        )
+        response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,)

         # Wrap the response in an operation future.
         response = ga_operation.from_gapic(
@@ -556,12 +518,7 @@ def get_endpoint(
         )

         # Send the request.
-        response = rpc(
-            request,
-            retry=retry,
-            timeout=timeout,
-            metadata=metadata,
-        )
+        response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,)

         # Done; return the response.
         return response
@@ -638,20 +595,12 @@ def list_endpoints(
         )

         # Send the request.
-        response = rpc(
-            request,
-            retry=retry,
-            timeout=timeout,
-            metadata=metadata,
-        )
+        response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,)

         # This method is paged; wrap the response in a pager, which provides
         # an `__iter__` convenience method.
         response = pagers.ListEndpointsPager(
-            method=rpc,
-            request=request,
-            response=response,
-            metadata=metadata,
+            method=rpc, request=request, response=response, metadata=metadata,
         )

         # Done; return the response.
@@ -737,12 +686,7 @@ def update_endpoint(
         )

         # Send the request.
-        response = rpc(
-            request,
-            retry=retry,
-            timeout=timeout,
-            metadata=metadata,
-        )
+        response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,)

         # Done; return the response.
         return response
@@ -831,12 +775,7 @@ def delete_endpoint(
         )

         # Send the request.
-        response = rpc(
-            request,
-            retry=retry,
-            timeout=timeout,
-            metadata=metadata,
-        )
+        response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,)

         # Wrap the response in an operation future.
         response = ga_operation.from_gapic(
@@ -961,12 +900,7 @@ def deploy_model(
         )

         # Send the request.
-        response = rpc(
-            request,
-            retry=retry,
-            timeout=timeout,
-            metadata=metadata,
-        )
+        response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,)

         # Wrap the response in an operation future.
         response = ga_operation.from_gapic(
@@ -1082,12 +1016,7 @@ def undeploy_model(
         )

         # Send the request.
-        response = rpc(
-            request,
-            retry=retry,
-            timeout=timeout,
-            metadata=metadata,
-        )
+        response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,)

         # Wrap the response in an operation future.
         response = ga_operation.from_gapic(
diff --git a/google/cloud/aiplatform_v1beta1/services/endpoint_service/transports/base.py b/google/cloud/aiplatform_v1beta1/services/endpoint_service/transports/base.py
index 8608b11624..88b2b17c57 100644
--- a/google/cloud/aiplatform_v1beta1/services/endpoint_service/transports/base.py
+++ b/google/cloud/aiplatform_v1beta1/services/endpoint_service/transports/base.py
@@ -111,39 +111,25 @@ def _prep_wrapped_messages(self, client_info):
         # Precompute the wrapped methods.
         self._wrapped_methods = {
             self.create_endpoint: gapic_v1.method.wrap_method(
-                self.create_endpoint,
-                default_timeout=5.0,
-                client_info=client_info,
+                self.create_endpoint, default_timeout=5.0, client_info=client_info,
             ),
             self.get_endpoint: gapic_v1.method.wrap_method(
-                self.get_endpoint,
-                default_timeout=5.0,
-                client_info=client_info,
+                self.get_endpoint, default_timeout=5.0, client_info=client_info,
             ),
             self.list_endpoints: gapic_v1.method.wrap_method(
-                self.list_endpoints,
-                default_timeout=5.0,
-                client_info=client_info,
+                self.list_endpoints, default_timeout=5.0, client_info=client_info,
             ),
             self.update_endpoint: gapic_v1.method.wrap_method(
-                self.update_endpoint,
-                default_timeout=5.0,
-                client_info=client_info,
+                self.update_endpoint, default_timeout=5.0, client_info=client_info,
             ),
             self.delete_endpoint: gapic_v1.method.wrap_method(
-                self.delete_endpoint,
-                default_timeout=5.0,
-                client_info=client_info,
+                self.delete_endpoint, default_timeout=5.0, client_info=client_info,
             ),
             self.deploy_model: gapic_v1.method.wrap_method(
-                self.deploy_model,
-                default_timeout=5.0,
-                client_info=client_info,
+                self.deploy_model, default_timeout=5.0, client_info=client_info,
             ),
             self.undeploy_model: gapic_v1.method.wrap_method(
-                self.undeploy_model,
-                default_timeout=5.0,
-                client_info=client_info,
+                self.undeploy_model, default_timeout=5.0, client_info=client_info,
             ),
         }
diff --git a/google/cloud/aiplatform_v1beta1/services/job_service/async_client.py b/google/cloud/aiplatform_v1beta1/services/job_service/async_client.py
index 258cd49a51..2a24748d11 100644
--- a/google/cloud/aiplatform_v1beta1/services/job_service/async_client.py
+++ b/google/cloud/aiplatform_v1beta1/services/job_service/async_client.py
@@ -263,12 +263,7 @@ async def create_custom_job(
         )

         # Send the request.
-        response = await rpc(
-            request,
-            retry=retry,
-            timeout=timeout,
-            metadata=metadata,
-        )
+        response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,)

         # Done; return the response.
         return response
@@ -346,12 +341,7 @@ async def get_custom_job(
         )

         # Send the request.
-        response = await rpc(
-            request,
-            retry=retry,
-            timeout=timeout,
-            metadata=metadata,
-        )
+        response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,)

         # Done; return the response.
         return response
@@ -427,20 +417,12 @@ async def list_custom_jobs(
         )

         # Send the request.
-        response = await rpc(
-            request,
-            retry=retry,
-            timeout=timeout,
-            metadata=metadata,
-        )
+        response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,)

         # This method is paged; wrap the response in a pager, which provides
         # an `__aiter__` convenience method.
         response = pagers.ListCustomJobsAsyncPager(
-            method=rpc,
-            request=request,
-            response=response,
-            metadata=metadata,
+            method=rpc, request=request, response=response, metadata=metadata,
         )

         # Done; return the response.
@@ -529,12 +511,7 @@ async def delete_custom_job(
         )

         # Send the request.
-        response = await rpc(
-            request,
-            retry=retry,
-            timeout=timeout,
-            metadata=metadata,
-        )
+        response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,)

         # Wrap the response in an operation future.
         response = operation_async.from_gapic(
@@ -621,10 +598,7 @@ async def cancel_custom_job(

         # Send the request.
         await rpc(
-            request,
-            retry=retry,
-            timeout=timeout,
-            metadata=metadata,
+            request, retry=retry, timeout=timeout, metadata=metadata,
         )

     async def create_data_labeling_job(
@@ -704,12 +678,7 @@ async def create_data_labeling_job(
         )

         # Send the request.
-        response = await rpc(
-            request,
-            retry=retry,
-            timeout=timeout,
-            metadata=metadata,
-        )
+        response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,)

         # Done; return the response.
         return response
@@ -783,12 +752,7 @@ async def get_data_labeling_job(
         )

         # Send the request.
-        response = await rpc(
-            request,
-            retry=retry,
-            timeout=timeout,
-            metadata=metadata,
-        )
+        response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,)

         # Done; return the response.
         return response
@@ -863,20 +827,12 @@ async def list_data_labeling_jobs(
         )

         # Send the request.
-        response = await rpc(
-            request,
-            retry=retry,
-            timeout=timeout,
-            metadata=metadata,
-        )
+        response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,)

         # This method is paged; wrap the response in a pager, which provides
         # an `__aiter__` convenience method.
         response = pagers.ListDataLabelingJobsAsyncPager(
-            method=rpc,
-            request=request,
-            response=response,
-            metadata=metadata,
+            method=rpc, request=request, response=response, metadata=metadata,
         )

         # Done; return the response.
@@ -966,12 +922,7 @@ async def delete_data_labeling_job(
         )

         # Send the request.
-        response = await rpc(
-            request,
-            retry=retry,
-            timeout=timeout,
-            metadata=metadata,
-        )
+        response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,)

         # Wrap the response in an operation future.
         response = operation_async.from_gapic(
@@ -1048,10 +999,7 @@ async def cancel_data_labeling_job(

         # Send the request.
         await rpc(
-            request,
-            retry=retry,
-            timeout=timeout,
-            metadata=metadata,
+            request, retry=retry, timeout=timeout, metadata=metadata,
         )

     async def create_hyperparameter_tuning_job(
@@ -1133,12 +1081,7 @@ async def create_hyperparameter_tuning_job(
         )

         # Send the request.
-        response = await rpc(
-            request,
-            retry=retry,
-            timeout=timeout,
-            metadata=metadata,
-        )
+        response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,)

         # Done; return the response.
         return response
@@ -1214,12 +1157,7 @@ async def get_hyperparameter_tuning_job(
         )

         # Send the request.
-        response = await rpc(
-            request,
-            retry=retry,
-            timeout=timeout,
-            metadata=metadata,
-        )
+        response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,)

         # Done; return the response.
         return response
@@ -1295,20 +1233,12 @@ async def list_hyperparameter_tuning_jobs(
         )

         # Send the request.
-        response = await rpc(
-            request,
-            retry=retry,
-            timeout=timeout,
-            metadata=metadata,
-        )
+        response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,)

         # This method is paged; wrap the response in a pager, which provides
         # an `__aiter__` convenience method.
         response = pagers.ListHyperparameterTuningJobsAsyncPager(
-            method=rpc,
-            request=request,
-            response=response,
-            metadata=metadata,
+            method=rpc, request=request, response=response, metadata=metadata,
         )

         # Done; return the response.
@@ -1398,12 +1328,7 @@ async def delete_hyperparameter_tuning_job(
         )

         # Send the request.
-        response = await rpc(
-            request,
-            retry=retry,
-            timeout=timeout,
-            metadata=metadata,
-        )
+        response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,)

         # Wrap the response in an operation future.
         response = operation_async.from_gapic(
@@ -1493,10 +1418,7 @@ async def cancel_hyperparameter_tuning_job(

         # Send the request.
         await rpc(
-            request,
-            retry=retry,
-            timeout=timeout,
-            metadata=metadata,
+            request, retry=retry, timeout=timeout, metadata=metadata,
         )

     async def create_batch_prediction_job(
@@ -1582,12 +1504,7 @@ async def create_batch_prediction_job(
         )

         # Send the request.
-        response = await rpc(
-            request,
-            retry=retry,
-            timeout=timeout,
-            metadata=metadata,
-        )
+        response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,)

         # Done; return the response.
         return response
@@ -1666,12 +1583,7 @@ async def get_batch_prediction_job(
         )

         # Send the request.
-        response = await rpc(
-            request,
-            retry=retry,
-            timeout=timeout,
-            metadata=metadata,
-        )
+        response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,)

         # Done; return the response.
         return response
@@ -1747,20 +1659,12 @@ async def list_batch_prediction_jobs(
         )

         # Send the request.
-        response = await rpc(
-            request,
-            retry=retry,
-            timeout=timeout,
-            metadata=metadata,
-        )
+        response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,)

         # This method is paged; wrap the response in a pager, which provides
         # an `__aiter__` convenience method.
         response = pagers.ListBatchPredictionJobsAsyncPager(
-            method=rpc,
-            request=request,
-            response=response,
-            metadata=metadata,
+            method=rpc, request=request, response=response, metadata=metadata,
         )

         # Done; return the response.
@@ -1851,12 +1755,7 @@ async def delete_batch_prediction_job(
         )

         # Send the request.
-        response = await rpc(
-            request,
-            retry=retry,
-            timeout=timeout,
-            metadata=metadata,
-        )
+        response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,)

         # Wrap the response in an operation future.
         response = operation_async.from_gapic(
@@ -1944,10 +1843,7 @@ async def cancel_batch_prediction_job(

         # Send the request.
         await rpc(
-            request,
-            retry=retry,
-            timeout=timeout,
-            metadata=metadata,
+            request, retry=retry, timeout=timeout, metadata=metadata,
         )
diff --git a/google/cloud/aiplatform_v1beta1/services/job_service/client.py b/google/cloud/aiplatform_v1beta1/services/job_service/client.py
index 9e73a6bf73..a1eb7c38ce 100644
--- a/google/cloud/aiplatform_v1beta1/services/job_service/client.py
+++ b/google/cloud/aiplatform_v1beta1/services/job_service/client.py
@@ -80,10 +80,7 @@ class JobServiceClientMeta(type):
     _transport_registry["grpc"] = JobServiceGrpcTransport
     _transport_registry["grpc_asyncio"] = JobServiceGrpcAsyncIOTransport

-    def get_transport_class(
-        cls,
-        label: str = None,
-    ) -> Type[JobServiceTransport]:
+    def get_transport_class(cls, label: str = None,) -> Type[JobServiceTransport]:
        """Return an appropriate transport class.
        Args:
@@ -170,9 +167,7 @@ def transport(self) -> JobServiceTransport:

     @staticmethod
     def batch_prediction_job_path(
-        project: str,
-        location: str,
-        batch_prediction_job: str,
+        project: str, location: str, batch_prediction_job: str,
     ) -> str:
         """Return a fully-qualified batch_prediction_job string."""
         return "projects/{project}/locations/{location}/batchPredictionJobs/{batch_prediction_job}".format(
@@ -191,16 +186,10 @@ def parse_batch_prediction_job_path(path: str) -> Dict[str, str]:
         return m.groupdict() if m else {}

     @staticmethod
-    def custom_job_path(
-        project: str,
-        location: str,
-        custom_job: str,
-    ) -> str:
+    def custom_job_path(project: str, location: str, custom_job: str,) -> str:
         """Return a fully-qualified custom_job string."""
         return "projects/{project}/locations/{location}/customJobs/{custom_job}".format(
-            project=project,
-            location=location,
-            custom_job=custom_job,
+            project=project, location=location, custom_job=custom_job,
         )

     @staticmethod
@@ -214,15 +203,11 @@ def parse_custom_job_path(path: str) -> Dict[str, str]:

     @staticmethod
     def data_labeling_job_path(
-        project: str,
-        location: str,
-        data_labeling_job: str,
+        project: str, location: str, data_labeling_job: str,
     ) -> str:
         """Return a fully-qualified data_labeling_job string."""
         return "projects/{project}/locations/{location}/dataLabelingJobs/{data_labeling_job}".format(
-            project=project,
-            location=location,
-            data_labeling_job=data_labeling_job,
+            project=project, location=location, data_labeling_job=data_labeling_job,
         )

     @staticmethod
@@ -235,16 +220,10 @@ def parse_data_labeling_job_path(path: str) -> Dict[str, str]:
         return m.groupdict() if m else {}

     @staticmethod
-    def dataset_path(
-        project: str,
-        location: str,
-        dataset: str,
-    ) -> str:
+    def dataset_path(project: str, location: str, dataset: str,) -> str:
         """Return a fully-qualified dataset string."""
         return "projects/{project}/locations/{location}/datasets/{dataset}".format(
-            project=project,
-            location=location,
-            dataset=dataset,
+            project=project, location=location, dataset=dataset,
         )

     @staticmethod
@@ -258,9 +237,7 @@ def parse_dataset_path(path: str) -> Dict[str, str]:

     @staticmethod
     def hyperparameter_tuning_job_path(
-        project: str,
-        location: str,
-        hyperparameter_tuning_job: str,
+        project: str, location: str, hyperparameter_tuning_job: str,
     ) -> str:
         """Return a fully-qualified hyperparameter_tuning_job string."""
         return "projects/{project}/locations/{location}/hyperparameterTuningJobs/{hyperparameter_tuning_job}".format(
@@ -279,16 +256,10 @@ def parse_hyperparameter_tuning_job_path(path: str) -> Dict[str, str]:
         return m.groupdict() if m else {}

     @staticmethod
-    def model_path(
-        project: str,
-        location: str,
-        model: str,
-    ) -> str:
+    def model_path(project: str, location: str, model: str,) -> str:
         """Return a fully-qualified model string."""
         return "projects/{project}/locations/{location}/models/{model}".format(
-            project=project,
-            location=location,
-            model=model,
+            project=project, location=location, model=model,
         )

     @staticmethod
@@ -301,9 +272,7 @@ def parse_model_path(path: str) -> Dict[str, str]:
         return m.groupdict() if m else {}

     @staticmethod
-    def common_billing_account_path(
-        billing_account: str,
-    ) -> str:
+    def common_billing_account_path(billing_account: str,) -> str:
         """Return a fully-qualified billing_account string."""
         return "billingAccounts/{billing_account}".format(
             billing_account=billing_account,
@@ -316,13 +285,9 @@ def parse_common_billing_account_path(path: str) -> Dict[str, str]:
         return m.groupdict() if m else {}

     @staticmethod
-    def common_folder_path(
-        folder: str,
-    ) -> str:
+    def common_folder_path(folder: str,) -> str:
         """Return a fully-qualified folder string."""
-        return "folders/{folder}".format(
-            folder=folder,
-        )
+        return "folders/{folder}".format(folder=folder,)

     @staticmethod
     def parse_common_folder_path(path: str) -> Dict[str, str]:
@@ -331,13 +296,9 @@ def parse_common_folder_path(path: str) -> Dict[str, str]:
         return m.groupdict() if m else {}

     @staticmethod
-    def common_organization_path(
-        organization: str,
-    ) -> str:
+    def common_organization_path(organization: str,) -> str:
         """Return a fully-qualified organization string."""
-        return "organizations/{organization}".format(
-            organization=organization,
-        )
+        return "organizations/{organization}".format(organization=organization,)

     @staticmethod
     def parse_common_organization_path(path: str) -> Dict[str, str]:
@@ -346,13 +307,9 @@ def parse_common_organization_path(path: str) -> Dict[str, str]:
         return m.groupdict() if m else {}

     @staticmethod
-    def common_project_path(
-        project: str,
-    ) -> str:
+    def common_project_path(project: str,) -> str:
         """Return a fully-qualified project string."""
-        return "projects/{project}".format(
-            project=project,
-        )
+        return "projects/{project}".format(project=project,)

     @staticmethod
     def parse_common_project_path(path: str) -> Dict[str, str]:
@@ -361,14 +318,10 @@ def parse_common_project_path(path: str) -> Dict[str, str]:
         return m.groupdict() if m else {}

     @staticmethod
-    def common_location_path(
-        project: str,
-        location: str,
-    ) -> str:
+    def common_location_path(project: str, location: str,) -> str:
         """Return a fully-qualified location string."""
         return "projects/{project}/locations/{location}".format(
-            project=project,
-            location=location,
+            project=project, location=location,
         )

     @staticmethod
@@ -578,12 +531,7 @@ def create_custom_job(
         )

         # Send the request.
-        response = rpc(
-            request,
-            retry=retry,
-            timeout=timeout,
-            metadata=metadata,
-        )
+        response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,)

         # Done; return the response.
         return response
@@ -662,12 +610,7 @@ def get_custom_job(
         )

         # Send the request.
-        response = rpc(
-            request,
-            retry=retry,
-            timeout=timeout,
-            metadata=metadata,
-        )
+        response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,)

         # Done; return the response.
         return response
@@ -744,20 +687,12 @@ def list_custom_jobs(
         )

         # Send the request.
-        response = rpc(
-            request,
-            retry=retry,
-            timeout=timeout,
-            metadata=metadata,
-        )
+        response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,)

         # This method is paged; wrap the response in a pager, which provides
         # an `__iter__` convenience method.
         response = pagers.ListCustomJobsPager(
-            method=rpc,
-            request=request,
-            response=response,
-            metadata=metadata,
+            method=rpc, request=request, response=response, metadata=metadata,
         )

         # Done; return the response.
@@ -847,12 +782,7 @@ def delete_custom_job(
         )

         # Send the request.
-        response = rpc(
-            request,
-            retry=retry,
-            timeout=timeout,
-            metadata=metadata,
-        )
+        response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,)

         # Wrap the response in an operation future.
         response = ga_operation.from_gapic(
@@ -940,10 +870,7 @@ def cancel_custom_job(

         # Send the request.
         rpc(
-            request,
-            retry=retry,
-            timeout=timeout,
-            metadata=metadata,
+            request, retry=retry, timeout=timeout, metadata=metadata,
         )

     def create_data_labeling_job(
@@ -1024,12 +951,7 @@ def create_data_labeling_job(
         )

         # Send the request.
-        response = rpc(
-            request,
-            retry=retry,
-            timeout=timeout,
-            metadata=metadata,
-        )
+        response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,)

         # Done; return the response.
         return response
@@ -1104,12 +1026,7 @@ def get_data_labeling_job(
         )

         # Send the request.
-        response = rpc(
-            request,
-            retry=retry,
-            timeout=timeout,
-            metadata=metadata,
-        )
+        response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,)

         # Done; return the response.
         return response
@@ -1185,20 +1102,12 @@ def list_data_labeling_jobs(
         )

         # Send the request.
-        response = rpc(
-            request,
-            retry=retry,
-            timeout=timeout,
-            metadata=metadata,
-        )
+        response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,)

         # This method is paged; wrap the response in a pager, which provides
         # an `__iter__` convenience method.
         response = pagers.ListDataLabelingJobsPager(
-            method=rpc,
-            request=request,
-            response=response,
-            metadata=metadata,
+            method=rpc, request=request, response=response, metadata=metadata,
         )

         # Done; return the response.
@@ -1289,12 +1198,7 @@ def delete_data_labeling_job(
         )

         # Send the request.
-        response = rpc(
-            request,
-            retry=retry,
-            timeout=timeout,
-            metadata=metadata,
-        )
+        response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,)

         # Wrap the response in an operation future.
         response = ga_operation.from_gapic(
@@ -1372,10 +1276,7 @@ def cancel_data_labeling_job(

         # Send the request.
         rpc(
-            request,
-            retry=retry,
-            timeout=timeout,
-            metadata=metadata,
+            request, retry=retry, timeout=timeout, metadata=metadata,
         )

     def create_hyperparameter_tuning_job(
@@ -1460,12 +1361,7 @@ def create_hyperparameter_tuning_job(
         )

         # Send the request.
-        response = rpc(
-            request,
-            retry=retry,
-            timeout=timeout,
-            metadata=metadata,
-        )
+        response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,)

         # Done; return the response.
         return response
@@ -1544,12 +1440,7 @@ def get_hyperparameter_tuning_job(
         )

         # Send the request.
-        response = rpc(
-            request,
-            retry=retry,
-            timeout=timeout,
-            metadata=metadata,
-        )
+        response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,)

         # Done; return the response.
         return response
@@ -1628,20 +1519,12 @@ def list_hyperparameter_tuning_jobs(
         )

         # Send the request.
-        response = rpc(
-            request,
-            retry=retry,
-            timeout=timeout,
-            metadata=metadata,
-        )
+        response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,)

         # This method is paged; wrap the response in a pager, which provides
         # an `__iter__` convenience method.
         response = pagers.ListHyperparameterTuningJobsPager(
-            method=rpc,
-            request=request,
-            response=response,
-            metadata=metadata,
+            method=rpc, request=request, response=response, metadata=metadata,
         )

         # Done; return the response.
@@ -1734,12 +1617,7 @@ def delete_hyperparameter_tuning_job(
         )

         # Send the request.
-        response = rpc(
-            request,
-            retry=retry,
-            timeout=timeout,
-            metadata=metadata,
-        )
+        response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,)

         # Wrap the response in an operation future.
         response = ga_operation.from_gapic(
@@ -1832,10 +1710,7 @@ def cancel_hyperparameter_tuning_job(

         # Send the request.
         rpc(
-            request,
-            retry=retry,
-            timeout=timeout,
-            metadata=metadata,
+            request, retry=retry, timeout=timeout, metadata=metadata,
         )

     def create_batch_prediction_job(
@@ -1924,12 +1799,7 @@ def create_batch_prediction_job(
         )

         # Send the request.
-        response = rpc(
-            request,
-            retry=retry,
-            timeout=timeout,
-            metadata=metadata,
-        )
+        response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,)

         # Done; return the response.
         return response
@@ -2009,12 +1879,7 @@ def get_batch_prediction_job(
         )

         # Send the request.
-        response = rpc(
-            request,
-            retry=retry,
-            timeout=timeout,
-            metadata=metadata,
-        )
+        response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,)

         # Done; return the response.
         return response
@@ -2093,20 +1958,12 @@ def list_batch_prediction_jobs(
         )

         # Send the request.
-        response = rpc(
-            request,
-            retry=retry,
-            timeout=timeout,
-            metadata=metadata,
-        )
+        response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,)

         # This method is paged; wrap the response in a pager, which provides
         # an `__iter__` convenience method.
         response = pagers.ListBatchPredictionJobsPager(
-            method=rpc,
-            request=request,
-            response=response,
-            metadata=metadata,
+            method=rpc, request=request, response=response, metadata=metadata,
         )

         # Done; return the response.
@@ -2200,12 +2057,7 @@ def delete_batch_prediction_job(
         )

         # Send the request.
-        response = rpc(
-            request,
-            retry=retry,
-            timeout=timeout,
-            metadata=metadata,
-        )
+        response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,)

         # Wrap the response in an operation future.
         response = ga_operation.from_gapic(
@@ -2296,10 +2148,7 @@ def cancel_batch_prediction_job(

         # Send the request.
         rpc(
-            request,
-            retry=retry,
-            timeout=timeout,
-            metadata=metadata,
+            request, retry=retry, timeout=timeout, metadata=metadata,
         )
diff --git a/google/cloud/aiplatform_v1beta1/services/job_service/transports/base.py b/google/cloud/aiplatform_v1beta1/services/job_service/transports/base.py
index 6ac6330d01..abedda51f9 100644
--- a/google/cloud/aiplatform_v1beta1/services/job_service/transports/base.py
+++ b/google/cloud/aiplatform_v1beta1/services/job_service/transports/base.py
@@ -124,29 +124,19 @@ def _prep_wrapped_messages(self, client_info):
         # Precompute the wrapped methods.
         self._wrapped_methods = {
             self.create_custom_job: gapic_v1.method.wrap_method(
-                self.create_custom_job,
-                default_timeout=5.0,
-                client_info=client_info,
+                self.create_custom_job, default_timeout=5.0, client_info=client_info,
             ),
             self.get_custom_job: gapic_v1.method.wrap_method(
-                self.get_custom_job,
-                default_timeout=5.0,
-                client_info=client_info,
+                self.get_custom_job, default_timeout=5.0, client_info=client_info,
             ),
             self.list_custom_jobs: gapic_v1.method.wrap_method(
-                self.list_custom_jobs,
-                default_timeout=5.0,
-                client_info=client_info,
+                self.list_custom_jobs, default_timeout=5.0, client_info=client_info,
             ),
             self.delete_custom_job: gapic_v1.method.wrap_method(
-                self.delete_custom_job,
-                default_timeout=5.0,
-                client_info=client_info,
+                self.delete_custom_job, default_timeout=5.0, client_info=client_info,
             ),
             self.cancel_custom_job: gapic_v1.method.wrap_method(
-                self.cancel_custom_job,
-                default_timeout=5.0,
-                client_info=client_info,
+                self.cancel_custom_job, default_timeout=5.0, client_info=client_info,
             ),
             self.create_data_labeling_job: gapic_v1.method.wrap_method(
                 self.create_data_labeling_job,
diff --git a/google/cloud/aiplatform_v1beta1/services/migration_service/async_client.py b/google/cloud/aiplatform_v1beta1/services/migration_service/async_client.py
index 0f2348ac38..af13c4d4fb 100644
--- a/google/cloud/aiplatform_v1beta1/services/migration_service/async_client.py
+++ b/google/cloud/aiplatform_v1beta1/services/migration_service/async_client.py
@@ -236,20 +236,12 @@ async def search_migratable_resources(
         )

         # Send the request.
-        response = await rpc(
-            request,
-            retry=retry,
-            timeout=timeout,
-            metadata=metadata,
-        )
+        response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,)

         # This method is paged; wrap the response in a pager, which provides
         # an `__aiter__` convenience method.
         response = pagers.SearchMigratableResourcesAsyncPager(
-            method=rpc,
-            request=request,
-            response=response,
-            metadata=metadata,
+            method=rpc, request=request, response=response, metadata=metadata,
         )

         # Done; return the response.
@@ -344,12 +336,7 @@ async def batch_migrate_resources(
         )

         # Send the request.
-        response = await rpc(
-            request,
-            retry=retry,
-            timeout=timeout,
-            metadata=metadata,
-        )
+        response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,)

         # Wrap the response in an operation future.
         response = operation_async.from_gapic(
diff --git a/google/cloud/aiplatform_v1beta1/services/migration_service/client.py b/google/cloud/aiplatform_v1beta1/services/migration_service/client.py
index 116a987f86..bf1f8e5c6b 100644
--- a/google/cloud/aiplatform_v1beta1/services/migration_service/client.py
+++ b/google/cloud/aiplatform_v1beta1/services/migration_service/client.py
@@ -57,10 +57,7 @@ class MigrationServiceClientMeta(type):
     _transport_registry["grpc"] = MigrationServiceGrpcTransport
     _transport_registry["grpc_asyncio"] = MigrationServiceGrpcAsyncIOTransport

-    def get_transport_class(
-        cls,
-        label: str = None,
-    ) -> Type[MigrationServiceTransport]:
+    def get_transport_class(cls, label: str = None,) -> Type[MigrationServiceTransport]:
        """Return an appropriate transport class.
diff --git a/google/cloud/aiplatform_v1beta1/services/migration_service/async_client.py b/google/cloud/aiplatform_v1beta1/services/migration_service/async_client.py
index 0f2348ac38..af13c4d4fb 100644
--- a/google/cloud/aiplatform_v1beta1/services/migration_service/async_client.py
+++ b/google/cloud/aiplatform_v1beta1/services/migration_service/async_client.py
@@ -236,20 +236,12 @@ async def search_migratable_resources(
         )

         # Send the request.
-        response = await rpc(
-            request,
-            retry=retry,
-            timeout=timeout,
-            metadata=metadata,
-        )
+        response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,)

         # This method is paged; wrap the response in a pager, which provides
         # an `__aiter__` convenience method.
         response = pagers.SearchMigratableResourcesAsyncPager(
-            method=rpc,
-            request=request,
-            response=response,
-            metadata=metadata,
+            method=rpc, request=request, response=response, metadata=metadata,
         )

         # Done; return the response.
@@ -344,12 +336,7 @@ async def batch_migrate_resources(
         )

         # Send the request.
-        response = await rpc(
-            request,
-            retry=retry,
-            timeout=timeout,
-            metadata=metadata,
-        )
+        response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,)

         # Wrap the response in an operation future.
         response = operation_async.from_gapic(
diff --git a/google/cloud/aiplatform_v1beta1/services/migration_service/client.py b/google/cloud/aiplatform_v1beta1/services/migration_service/client.py
index 116a987f86..bf1f8e5c6b 100644
--- a/google/cloud/aiplatform_v1beta1/services/migration_service/client.py
+++ b/google/cloud/aiplatform_v1beta1/services/migration_service/client.py
@@ -57,10 +57,7 @@ class MigrationServiceClientMeta(type):
     _transport_registry["grpc"] = MigrationServiceGrpcTransport
     _transport_registry["grpc_asyncio"] = MigrationServiceGrpcAsyncIOTransport

-    def get_transport_class(
-        cls,
-        label: str = None,
-    ) -> Type[MigrationServiceTransport]:
+    def get_transport_class(cls, label: str = None,) -> Type[MigrationServiceTransport]:
         """Return an appropriate transport class.

         Args:
@@ -150,15 +147,11 @@ def transport(self) -> MigrationServiceTransport:

     @staticmethod
     def annotated_dataset_path(
-        project: str,
-        dataset: str,
-        annotated_dataset: str,
+        project: str, dataset: str, annotated_dataset: str,
     ) -> str:
         """Return a fully-qualified annotated_dataset string."""
         return "projects/{project}/datasets/{dataset}/annotatedDatasets/{annotated_dataset}".format(
-            project=project,
-            dataset=dataset,
-            annotated_dataset=annotated_dataset,
+            project=project, dataset=dataset, annotated_dataset=annotated_dataset,
         )

     @staticmethod
@@ -171,16 +164,10 @@ def parse_annotated_dataset_path(path: str) -> Dict[str, str]:
         return m.groupdict() if m else {}

     @staticmethod
-    def dataset_path(
-        project: str,
-        location: str,
-        dataset: str,
-    ) -> str:
+    def dataset_path(project: str, location: str, dataset: str,) -> str:
         """Return a fully-qualified dataset string."""
         return "projects/{project}/locations/{location}/datasets/{dataset}".format(
-            project=project,
-            location=location,
-            dataset=dataset,
+            project=project, location=location, dataset=dataset,
         )

     @staticmethod
@@ -193,14 +180,10 @@ def parse_dataset_path(path: str) -> Dict[str, str]:
         return m.groupdict() if m else {}

     @staticmethod
-    def dataset_path(
-        project: str,
-        dataset: str,
-    ) -> str:
+    def dataset_path(project: str, dataset: str,) -> str:
         """Return a fully-qualified dataset string."""
         return "projects/{project}/datasets/{dataset}".format(
-            project=project,
-            dataset=dataset,
+            project=project, dataset=dataset,
         )

     @staticmethod
@@ -210,16 +193,10 @@ def parse_dataset_path(path: str) -> Dict[str, str]:
         return m.groupdict() if m else {}

     @staticmethod
-    def dataset_path(
-        project: str,
-        location: str,
-        dataset: str,
-    ) -> str:
+    def dataset_path(project: str, location: str, dataset: str,) -> str:
         """Return a fully-qualified dataset string."""
         return "projects/{project}/locations/{location}/datasets/{dataset}".format(
-            project=project,
-            location=location,
-            dataset=dataset,
+            project=project, location=location, dataset=dataset,
         )

     @staticmethod
@@ -232,16 +209,10 @@ def parse_dataset_path(path: str) -> Dict[str, str]:
         return m.groupdict() if m else {}

     @staticmethod
-    def model_path(
-        project: str,
-        location: str,
-        model: str,
-    ) -> str:
+    def model_path(project: str, location: str, model: str,) -> str:
         """Return a fully-qualified model string."""
         return "projects/{project}/locations/{location}/models/{model}".format(
-            project=project,
-            location=location,
-            model=model,
+            project=project, location=location, model=model,
         )

     @staticmethod
@@ -254,16 +225,10 @@ def parse_model_path(path: str) -> Dict[str, str]:
         return m.groupdict() if m else {}

     @staticmethod
-    def model_path(
-        project: str,
-        location: str,
-        model: str,
-    ) -> str:
+    def model_path(project: str, location: str, model: str,) -> str:
         """Return a fully-qualified model string."""
         return "projects/{project}/locations/{location}/models/{model}".format(
-            project=project,
-            location=location,
-            model=model,
+            project=project, location=location, model=model,
         )

     @staticmethod
@@ -276,16 +241,10 @@ def parse_model_path(path: str) -> Dict[str, str]:
         return m.groupdict() if m else {}

     @staticmethod
-    def version_path(
-        project: str,
-        model: str,
-        version: str,
-    ) -> str:
+    def version_path(project: str, model: str, version: str,) -> str:
         """Return a fully-qualified version string."""
         return "projects/{project}/models/{model}/versions/{version}".format(
-            project=project,
-            model=model,
-            version=version,
+            project=project, model=model, version=version,
         )

     @staticmethod
@@ -298,9 +257,7 @@ def parse_version_path(path: str) -> Dict[str, str]:
         return m.groupdict() if m else {}

     @staticmethod
-    def common_billing_account_path(
-        billing_account: str,
-    ) -> str:
+    def common_billing_account_path(billing_account: str,) -> str:
         """Return a fully-qualified billing_account string."""
         return "billingAccounts/{billing_account}".format(
             billing_account=billing_account,
@@ -313,13 +270,9 @@ def parse_common_billing_account_path(path: str) -> Dict[str, str]:
         return m.groupdict() if m else {}

     @staticmethod
-    def common_folder_path(
-        folder: str,
-    ) -> str:
+    def common_folder_path(folder: str,) -> str:
         """Return a fully-qualified folder string."""
-        return "folders/{folder}".format(
-            folder=folder,
-        )
+        return "folders/{folder}".format(folder=folder,)

     @staticmethod
     def parse_common_folder_path(path: str) -> Dict[str, str]:
@@ -328,13 +281,9 @@ def parse_common_folder_path(path: str) -> Dict[str, str]:
         return m.groupdict() if m else {}

     @staticmethod
-    def common_organization_path(
-        organization: str,
-    ) -> str:
+    def common_organization_path(organization: str,) -> str:
         """Return a fully-qualified organization string."""
-        return "organizations/{organization}".format(
-            organization=organization,
-        )
+        return "organizations/{organization}".format(organization=organization,)

     @staticmethod
     def parse_common_organization_path(path: str) -> Dict[str, str]:
@@ -343,13 +292,9 @@ def parse_common_organization_path(path: str) -> Dict[str, str]:
         return m.groupdict() if m else {}

     @staticmethod
-    def common_project_path(
-        project: str,
-    ) -> str:
+    def common_project_path(project: str,) -> str:
         """Return a fully-qualified project string."""
-        return "projects/{project}".format(
-            project=project,
-        )
+        return "projects/{project}".format(project=project,)

     @staticmethod
     def parse_common_project_path(path: str) -> Dict[str, str]:
@@ -358,14 +303,10 @@ def parse_common_project_path(path: str) -> Dict[str, str]:
         return m.groupdict() if m else {}

     @staticmethod
-    def common_location_path(
-        project: str,
-        location: str,
-    ) -> str:
+    def common_location_path(project: str, location: str,) -> str:
         """Return a fully-qualified location string."""
         return "projects/{project}/locations/{location}".format(
-            project=project,
-            location=location,
+            project=project, location=location,
         )

     @staticmethod
@@ -570,20 +511,12 @@ def search_migratable_resources(
         )

         # Send the request.
-        response = rpc(
-            request,
-            retry=retry,
-            timeout=timeout,
-            metadata=metadata,
-        )
+        response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,)

         # This method is paged; wrap the response in a pager, which provides
         # an `__iter__` convenience method.
         response = pagers.SearchMigratableResourcesPager(
-            method=rpc,
-            request=request,
-            response=response,
-            metadata=metadata,
+            method=rpc, request=request, response=response, metadata=metadata,
         )

         # Done; return the response.
@@ -679,12 +612,7 @@ def batch_migrate_resources(
         )

         # Send the request.
-        response = rpc(
-            request,
-            retry=retry,
-            timeout=timeout,
-            metadata=metadata,
-        )
+        response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,)

         # Wrap the response in an operation future.
         response = operation.from_gapic(
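These `*_path`/`parse_*_path` helpers are pure string utilities, so they can be exercised without credentials. Note, as the hunks above show, the generated `MigrationServiceClient` defines `dataset_path` (and `model_path`) more than once; Python keeps the last definition, the three-argument location-qualified form. A usage sketch with placeholder IDs:

    from google.cloud.aiplatform_v1beta1 import MigrationServiceClient

    name = MigrationServiceClient.dataset_path("my-project", "us-central1", "123")
    print(name)
    # projects/my-project/locations/us-central1/datasets/123

    # parse_* is the inverse; it returns {} when the path does not match.
    print(MigrationServiceClient.parse_dataset_path(name))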
diff --git a/google/cloud/aiplatform_v1beta1/services/model_service/async_client.py b/google/cloud/aiplatform_v1beta1/services/model_service/async_client.py
index 631671e269..3b27b6e184 100644
--- a/google/cloud/aiplatform_v1beta1/services/model_service/async_client.py
+++ b/google/cloud/aiplatform_v1beta1/services/model_service/async_client.py
@@ -246,12 +246,7 @@ async def upload_model(
         )

         # Send the request.
-        response = await rpc(
-            request,
-            retry=retry,
-            timeout=timeout,
-            metadata=metadata,
-        )
+        response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,)

         # Wrap the response in an operation future.
         response = operation_async.from_gapic(
@@ -329,12 +324,7 @@ async def get_model(
         )

         # Send the request.
-        response = await rpc(
-            request,
-            retry=retry,
-            timeout=timeout,
-            metadata=metadata,
-        )
+        response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,)

         # Done; return the response.
         return response
@@ -410,20 +400,12 @@ async def list_models(
         )

         # Send the request.
-        response = await rpc(
-            request,
-            retry=retry,
-            timeout=timeout,
-            metadata=metadata,
-        )
+        response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,)

         # This method is paged; wrap the response in a pager, which provides
         # an `__aiter__` convenience method.
         response = pagers.ListModelsAsyncPager(
-            method=rpc,
-            request=request,
-            response=response,
-            metadata=metadata,
+            method=rpc, request=request, response=response, metadata=metadata,
         )

         # Done; return the response.
@@ -507,12 +489,7 @@ async def update_model(
         )

         # Send the request.
-        response = await rpc(
-            request,
-            retry=retry,
-            timeout=timeout,
-            metadata=metadata,
-        )
+        response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,)

         # Done; return the response.
         return response
@@ -602,12 +579,7 @@ async def delete_model(
         )

         # Send the request.
-        response = await rpc(
-            request,
-            retry=retry,
-            timeout=timeout,
-            metadata=metadata,
-        )
+        response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,)

         # Wrap the response in an operation future.
         response = operation_async.from_gapic(
@@ -705,12 +677,7 @@ async def export_model(
         )

         # Send the request.
-        response = await rpc(
-            request,
-            retry=retry,
-            timeout=timeout,
-            metadata=metadata,
-        )
+        response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,)

         # Wrap the response in an operation future.
         response = operation_async.from_gapic(
@@ -794,12 +761,7 @@ async def get_model_evaluation(
         )

         # Send the request.
-        response = await rpc(
-            request,
-            retry=retry,
-            timeout=timeout,
-            metadata=metadata,
-        )
+        response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,)

         # Done; return the response.
         return response
@@ -875,20 +837,12 @@ async def list_model_evaluations(
         )

         # Send the request.
-        response = await rpc(
-            request,
-            retry=retry,
-            timeout=timeout,
-            metadata=metadata,
-        )
+        response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,)

         # This method is paged; wrap the response in a pager, which provides
         # an `__aiter__` convenience method.
         response = pagers.ListModelEvaluationsAsyncPager(
-            method=rpc,
-            request=request,
-            response=response,
-            metadata=metadata,
+            method=rpc, request=request, response=response, metadata=metadata,
         )

         # Done; return the response.
@@ -965,12 +919,7 @@ async def get_model_evaluation_slice(
         )

         # Send the request.
-        response = await rpc(
-            request,
-            retry=retry,
-            timeout=timeout,
-            metadata=metadata,
-        )
+        response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,)

         # Done; return the response.
         return response
@@ -1047,20 +996,12 @@ async def list_model_evaluation_slices(
         )

         # Send the request.
-        response = await rpc(
-            request,
-            retry=retry,
-            timeout=timeout,
-            metadata=metadata,
-        )
+        response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,)

         # This method is paged; wrap the response in a pager, which provides
         # an `__aiter__` convenience method.
         response = pagers.ListModelEvaluationSlicesAsyncPager(
-            method=rpc,
-            request=request,
-            response=response,
-            metadata=metadata,
+            method=rpc, request=request, response=response, metadata=metadata,
         )

         # Done; return the response.
diff --git a/google/cloud/aiplatform_v1beta1/services/model_service/client.py b/google/cloud/aiplatform_v1beta1/services/model_service/client.py
index 423a86bb70..30c00c0c9d 100644
--- a/google/cloud/aiplatform_v1beta1/services/model_service/client.py
+++ b/google/cloud/aiplatform_v1beta1/services/model_service/client.py
@@ -65,10 +65,7 @@ class ModelServiceClientMeta(type):
     _transport_registry["grpc"] = ModelServiceGrpcTransport
     _transport_registry["grpc_asyncio"] = ModelServiceGrpcAsyncIOTransport

-    def get_transport_class(
-        cls,
-        label: str = None,
-    ) -> Type[ModelServiceTransport]:
+    def get_transport_class(cls, label: str = None,) -> Type[ModelServiceTransport]:
         """Return an appropriate transport class.

         Args:
@@ -154,16 +151,10 @@ def transport(self) -> ModelServiceTransport:
         return self._transport

     @staticmethod
-    def endpoint_path(
-        project: str,
-        location: str,
-        endpoint: str,
-    ) -> str:
+    def endpoint_path(project: str, location: str, endpoint: str,) -> str:
         """Return a fully-qualified endpoint string."""
         return "projects/{project}/locations/{location}/endpoints/{endpoint}".format(
-            project=project,
-            location=location,
-            endpoint=endpoint,
+            project=project, location=location, endpoint=endpoint,
         )

     @staticmethod
@@ -176,16 +167,10 @@ def parse_endpoint_path(path: str) -> Dict[str, str]:
         return m.groupdict() if m else {}

     @staticmethod
-    def model_path(
-        project: str,
-        location: str,
-        model: str,
-    ) -> str:
+    def model_path(project: str, location: str, model: str,) -> str:
         """Return a fully-qualified model string."""
         return "projects/{project}/locations/{location}/models/{model}".format(
-            project=project,
-            location=location,
-            model=model,
+            project=project, location=location, model=model,
         )

     @staticmethod
@@ -199,17 +184,11 @@ def parse_model_path(path: str) -> Dict[str, str]:

     @staticmethod
     def model_evaluation_path(
-        project: str,
-        location: str,
-        model: str,
-        evaluation: str,
+        project: str, location: str, model: str, evaluation: str,
     ) -> str:
         """Return a fully-qualified model_evaluation string."""
         return "projects/{project}/locations/{location}/models/{model}/evaluations/{evaluation}".format(
-            project=project,
-            location=location,
-            model=model,
-            evaluation=evaluation,
+            project=project, location=location, model=model, evaluation=evaluation,
         )

     @staticmethod
@@ -223,11 +202,7 @@ def parse_model_evaluation_path(path: str) -> Dict[str, str]:

     @staticmethod
     def model_evaluation_slice_path(
-        project: str,
-        location: str,
-        model: str,
-        evaluation: str,
-        slice: str,
+        project: str, location: str, model: str, evaluation: str, slice: str,
     ) -> str:
         """Return a fully-qualified model_evaluation_slice string."""
"projects/{project}/locations/{location}/models/{model}/evaluations/{evaluation}/slices/{slice}".format( @@ -249,15 +224,11 @@ def parse_model_evaluation_slice_path(path: str) -> Dict[str, str]: @staticmethod def training_pipeline_path( - project: str, - location: str, - training_pipeline: str, + project: str, location: str, training_pipeline: str, ) -> str: """Return a fully-qualified training_pipeline string.""" return "projects/{project}/locations/{location}/trainingPipelines/{training_pipeline}".format( - project=project, - location=location, - training_pipeline=training_pipeline, + project=project, location=location, training_pipeline=training_pipeline, ) @staticmethod @@ -270,9 +241,7 @@ def parse_training_pipeline_path(path: str) -> Dict[str, str]: return m.groupdict() if m else {} @staticmethod - def common_billing_account_path( - billing_account: str, - ) -> str: + def common_billing_account_path(billing_account: str,) -> str: """Return a fully-qualified billing_account string.""" return "billingAccounts/{billing_account}".format( billing_account=billing_account, @@ -285,13 +254,9 @@ def parse_common_billing_account_path(path: str) -> Dict[str, str]: return m.groupdict() if m else {} @staticmethod - def common_folder_path( - folder: str, - ) -> str: + def common_folder_path(folder: str,) -> str: """Return a fully-qualified folder string.""" - return "folders/{folder}".format( - folder=folder, - ) + return "folders/{folder}".format(folder=folder,) @staticmethod def parse_common_folder_path(path: str) -> Dict[str, str]: @@ -300,13 +265,9 @@ def parse_common_folder_path(path: str) -> Dict[str, str]: return m.groupdict() if m else {} @staticmethod - def common_organization_path( - organization: str, - ) -> str: + def common_organization_path(organization: str,) -> str: """Return a fully-qualified organization string.""" - return "organizations/{organization}".format( - organization=organization, - ) + return "organizations/{organization}".format(organization=organization,) @staticmethod def parse_common_organization_path(path: str) -> Dict[str, str]: @@ -315,13 +276,9 @@ def parse_common_organization_path(path: str) -> Dict[str, str]: return m.groupdict() if m else {} @staticmethod - def common_project_path( - project: str, - ) -> str: + def common_project_path(project: str,) -> str: """Return a fully-qualified project string.""" - return "projects/{project}".format( - project=project, - ) + return "projects/{project}".format(project=project,) @staticmethod def parse_common_project_path(path: str) -> Dict[str, str]: @@ -330,14 +287,10 @@ def parse_common_project_path(path: str) -> Dict[str, str]: return m.groupdict() if m else {} @staticmethod - def common_location_path( - project: str, - location: str, - ) -> str: + def common_location_path(project: str, location: str,) -> str: """Return a fully-qualified location string.""" return "projects/{project}/locations/{location}".format( - project=project, - location=location, + project=project, location=location, ) @staticmethod @@ -545,12 +498,7 @@ def upload_model( ) # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,) # Wrap the response in an operation future. response = ga_operation.from_gapic( @@ -629,12 +577,7 @@ def get_model( ) # Send the request. 
-        response = rpc(
-            request,
-            retry=retry,
-            timeout=timeout,
-            metadata=metadata,
-        )
+        response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,)

         # Done; return the response.
         return response
@@ -711,20 +654,12 @@ def list_models(
         )

         # Send the request.
-        response = rpc(
-            request,
-            retry=retry,
-            timeout=timeout,
-            metadata=metadata,
-        )
+        response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,)

         # This method is paged; wrap the response in a pager, which provides
         # an `__iter__` convenience method.
         response = pagers.ListModelsPager(
-            method=rpc,
-            request=request,
-            response=response,
-            metadata=metadata,
+            method=rpc, request=request, response=response, metadata=metadata,
         )

         # Done; return the response.
@@ -809,12 +744,7 @@ def update_model(
         )

         # Send the request.
-        response = rpc(
-            request,
-            retry=retry,
-            timeout=timeout,
-            metadata=metadata,
-        )
+        response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,)

         # Done; return the response.
         return response
@@ -905,12 +835,7 @@ def delete_model(
         )

         # Send the request.
-        response = rpc(
-            request,
-            retry=retry,
-            timeout=timeout,
-            metadata=metadata,
-        )
+        response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,)

         # Wrap the response in an operation future.
         response = ga_operation.from_gapic(
@@ -1009,12 +934,7 @@ def export_model(
         )

         # Send the request.
-        response = rpc(
-            request,
-            retry=retry,
-            timeout=timeout,
-            metadata=metadata,
-        )
+        response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,)

         # Wrap the response in an operation future.
         response = ga_operation.from_gapic(
@@ -1099,12 +1019,7 @@ def get_model_evaluation(
         )

         # Send the request.
-        response = rpc(
-            request,
-            retry=retry,
-            timeout=timeout,
-            metadata=metadata,
-        )
+        response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,)

         # Done; return the response.
         return response
@@ -1181,20 +1096,12 @@ def list_model_evaluations(
         )

         # Send the request.
-        response = rpc(
-            request,
-            retry=retry,
-            timeout=timeout,
-            metadata=metadata,
-        )
+        response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,)

         # This method is paged; wrap the response in a pager, which provides
         # an `__iter__` convenience method.
         response = pagers.ListModelEvaluationsPager(
-            method=rpc,
-            request=request,
-            response=response,
-            metadata=metadata,
+            method=rpc, request=request, response=response, metadata=metadata,
         )

         # Done; return the response.
@@ -1274,12 +1181,7 @@ def get_model_evaluation_slice(
         )

         # Send the request.
-        response = rpc(
-            request,
-            retry=retry,
-            timeout=timeout,
-            metadata=metadata,
-        )
+        response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,)

         # Done; return the response.
         return response
@@ -1359,20 +1261,12 @@ def list_model_evaluation_slices(
         )

         # Send the request.
-        response = rpc(
-            request,
-            retry=retry,
-            timeout=timeout,
-            metadata=metadata,
-        )
+        response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,)

         # This method is paged; wrap the response in a pager, which provides
         # an `__iter__` convenience method.
         response = pagers.ListModelEvaluationSlicesPager(
-            method=rpc,
-            request=request,
-            response=response,
-            metadata=metadata,
+            method=rpc, request=request, response=response, metadata=metadata,
         )

         # Done; return the response.
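The `*Pager` objects touched above wrap a list response together with the bound RPC, so iteration fetches follow-up pages lazily instead of forcing callers to thread `page_token` by hand. A sketch, assuming application default credentials and placeholder project/location values:

    from google.cloud.aiplatform_v1beta1 import ModelServiceClient

    client = ModelServiceClient()
    parent = client.common_location_path("my-project", "us-central1")

    # The pager issues additional ListModels calls as iteration crosses
    # page boundaries.
    for model in client.list_models(parent=parent):
        print(model.display_name)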
diff --git a/google/cloud/aiplatform_v1beta1/services/model_service/transports/base.py b/google/cloud/aiplatform_v1beta1/services/model_service/transports/base.py
index 17e7a98018..a0b896cdf4 100644
--- a/google/cloud/aiplatform_v1beta1/services/model_service/transports/base.py
+++ b/google/cloud/aiplatform_v1beta1/services/model_service/transports/base.py
@@ -113,39 +113,25 @@ def _prep_wrapped_messages(self, client_info):
         # Precompute the wrapped methods.
         self._wrapped_methods = {
             self.upload_model: gapic_v1.method.wrap_method(
-                self.upload_model,
-                default_timeout=5.0,
-                client_info=client_info,
+                self.upload_model, default_timeout=5.0, client_info=client_info,
             ),
             self.get_model: gapic_v1.method.wrap_method(
-                self.get_model,
-                default_timeout=5.0,
-                client_info=client_info,
+                self.get_model, default_timeout=5.0, client_info=client_info,
             ),
             self.list_models: gapic_v1.method.wrap_method(
-                self.list_models,
-                default_timeout=5.0,
-                client_info=client_info,
+                self.list_models, default_timeout=5.0, client_info=client_info,
             ),
             self.update_model: gapic_v1.method.wrap_method(
-                self.update_model,
-                default_timeout=5.0,
-                client_info=client_info,
+                self.update_model, default_timeout=5.0, client_info=client_info,
             ),
             self.delete_model: gapic_v1.method.wrap_method(
-                self.delete_model,
-                default_timeout=5.0,
-                client_info=client_info,
+                self.delete_model, default_timeout=5.0, client_info=client_info,
             ),
             self.export_model: gapic_v1.method.wrap_method(
-                self.export_model,
-                default_timeout=5.0,
-                client_info=client_info,
+                self.export_model, default_timeout=5.0, client_info=client_info,
             ),
             self.get_model_evaluation: gapic_v1.method.wrap_method(
-                self.get_model_evaluation,
-                default_timeout=5.0,
-                client_info=client_info,
+                self.get_model_evaluation, default_timeout=5.0, client_info=client_info,
             ),
             self.list_model_evaluations: gapic_v1.method.wrap_method(
                 self.list_model_evaluations,
diff --git a/google/cloud/aiplatform_v1beta1/services/pipeline_service/async_client.py b/google/cloud/aiplatform_v1beta1/services/pipeline_service/async_client.py
index 9b3f2f7fa7..ef420aae0b 100644
--- a/google/cloud/aiplatform_v1beta1/services/pipeline_service/async_client.py
+++ b/google/cloud/aiplatform_v1beta1/services/pipeline_service/async_client.py
@@ -241,12 +241,7 @@ async def create_training_pipeline(
         )

         # Send the request.
-        response = await rpc(
-            request,
-            retry=retry,
-            timeout=timeout,
-            metadata=metadata,
-        )
+        response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,)

         # Done; return the response.
         return response
@@ -324,12 +319,7 @@ async def get_training_pipeline(
         )

         # Send the request.
-        response = await rpc(
-            request,
-            retry=retry,
-            timeout=timeout,
-            metadata=metadata,
-        )
+        response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,)

         # Done; return the response.
         return response
@@ -405,20 +395,12 @@ async def list_training_pipelines(
         )

         # Send the request.
-        response = await rpc(
-            request,
-            retry=retry,
-            timeout=timeout,
-            metadata=metadata,
-        )
+        response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,)

         # This method is paged; wrap the response in a pager, which provides
         # an `__aiter__` convenience method.
         response = pagers.ListTrainingPipelinesAsyncPager(
-            method=rpc,
-            request=request,
-            response=response,
-            metadata=metadata,
+            method=rpc, request=request, response=response, metadata=metadata,
         )

         # Done; return the response.
@@ -508,12 +490,7 @@ async def delete_training_pipeline(
         )

         # Send the request.
-        response = await rpc(
-            request,
-            retry=retry,
-            timeout=timeout,
-            metadata=metadata,
-        )
+        response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,)

         # Wrap the response in an operation future.
         response = operation_async.from_gapic(
@@ -602,10 +579,7 @@ async def cancel_training_pipeline(

         # Send the request.
         await rpc(
-            request,
-            retry=retry,
-            timeout=timeout,
-            metadata=metadata,
+            request, retry=retry, timeout=timeout, metadata=metadata,
         )
diff --git a/google/cloud/aiplatform_v1beta1/services/pipeline_service/client.py b/google/cloud/aiplatform_v1beta1/services/pipeline_service/client.py
index 73c79cc90d..e3e7d6aeda 100644
--- a/google/cloud/aiplatform_v1beta1/services/pipeline_service/client.py
+++ b/google/cloud/aiplatform_v1beta1/services/pipeline_service/client.py
@@ -67,10 +67,7 @@ class PipelineServiceClientMeta(type):
     _transport_registry["grpc"] = PipelineServiceGrpcTransport
     _transport_registry["grpc_asyncio"] = PipelineServiceGrpcAsyncIOTransport

-    def get_transport_class(
-        cls,
-        label: str = None,
-    ) -> Type[PipelineServiceTransport]:
+    def get_transport_class(cls, label: str = None,) -> Type[PipelineServiceTransport]:
         """Return an appropriate transport class.

         Args:
@@ -156,16 +153,10 @@ def transport(self) -> PipelineServiceTransport:
         return self._transport

     @staticmethod
-    def endpoint_path(
-        project: str,
-        location: str,
-        endpoint: str,
-    ) -> str:
+    def endpoint_path(project: str, location: str, endpoint: str,) -> str:
         """Return a fully-qualified endpoint string."""
         return "projects/{project}/locations/{location}/endpoints/{endpoint}".format(
-            project=project,
-            location=location,
-            endpoint=endpoint,
+            project=project, location=location, endpoint=endpoint,
         )

     @staticmethod
@@ -178,16 +169,10 @@ def parse_endpoint_path(path: str) -> Dict[str, str]:
         return m.groupdict() if m else {}

     @staticmethod
-    def model_path(
-        project: str,
-        location: str,
-        model: str,
-    ) -> str:
+    def model_path(project: str, location: str, model: str,) -> str:
         """Return a fully-qualified model string."""
         return "projects/{project}/locations/{location}/models/{model}".format(
-            project=project,
-            location=location,
-            model=model,
+            project=project, location=location, model=model,
         )

     @staticmethod
@@ -201,15 +186,11 @@ def parse_model_path(path: str) -> Dict[str, str]:

     @staticmethod
     def training_pipeline_path(
-        project: str,
-        location: str,
-        training_pipeline: str,
+        project: str, location: str, training_pipeline: str,
     ) -> str:
         """Return a fully-qualified training_pipeline string."""
         return "projects/{project}/locations/{location}/trainingPipelines/{training_pipeline}".format(
-            project=project,
-            location=location,
-            training_pipeline=training_pipeline,
+            project=project, location=location, training_pipeline=training_pipeline,
         )

     @staticmethod
@@ -222,9 +203,7 @@ def parse_training_pipeline_path(path: str) -> Dict[str, str]:
         return m.groupdict() if m else {}

     @staticmethod
-    def common_billing_account_path(
-        billing_account: str,
-    ) -> str:
+    def common_billing_account_path(billing_account: str,) -> str:
         """Return a fully-qualified billing_account string."""
         return "billingAccounts/{billing_account}".format(
             billing_account=billing_account,
@@ -237,13 +216,9 @@ def parse_common_billing_account_path(path: str) -> Dict[str, str]:
         return m.groupdict() if m else {}

     @staticmethod
-    def common_folder_path(
-        folder: str,
-    ) -> str:
+    def common_folder_path(folder: str,) -> str:
         """Return a fully-qualified folder string."""
-        return "folders/{folder}".format(
-            folder=folder,
-        )
+        return "folders/{folder}".format(folder=folder,)

     @staticmethod
     def parse_common_folder_path(path: str) -> Dict[str, str]:
@@ -252,13 +227,9 @@ def parse_common_folder_path(path: str) -> Dict[str, str]:
         return m.groupdict() if m else {}

     @staticmethod
-    def common_organization_path(
-        organization: str,
-    ) -> str:
+    def common_organization_path(organization: str,) -> str:
         """Return a fully-qualified organization string."""
-        return "organizations/{organization}".format(
-            organization=organization,
-        )
+        return "organizations/{organization}".format(organization=organization,)

     @staticmethod
     def parse_common_organization_path(path: str) -> Dict[str, str]:
@@ -267,13 +238,9 @@ def parse_common_organization_path(path: str) -> Dict[str, str]:
         return m.groupdict() if m else {}

     @staticmethod
-    def common_project_path(
-        project: str,
-    ) -> str:
+    def common_project_path(project: str,) -> str:
         """Return a fully-qualified project string."""
-        return "projects/{project}".format(
-            project=project,
-        )
+        return "projects/{project}".format(project=project,)

     @staticmethod
     def parse_common_project_path(path: str) -> Dict[str, str]:
@@ -282,14 +249,10 @@ def parse_common_project_path(path: str) -> Dict[str, str]:
         return m.groupdict() if m else {}

     @staticmethod
-    def common_location_path(
-        project: str,
-        location: str,
-    ) -> str:
+    def common_location_path(project: str, location: str,) -> str:
         """Return a fully-qualified location string."""
         return "projects/{project}/locations/{location}".format(
-            project=project,
-            location=location,
+            project=project, location=location,
         )

     @staticmethod
@@ -498,12 +461,7 @@ def create_training_pipeline(
         )

         # Send the request.
-        response = rpc(
-            request,
-            retry=retry,
-            timeout=timeout,
-            metadata=metadata,
-        )
+        response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,)

         # Done; return the response.
         return response
@@ -582,12 +540,7 @@ def get_training_pipeline(
         )

         # Send the request.
-        response = rpc(
-            request,
-            retry=retry,
-            timeout=timeout,
-            metadata=metadata,
-        )
+        response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,)

         # Done; return the response.
         return response
@@ -664,20 +617,12 @@ def list_training_pipelines(
         )

         # Send the request.
-        response = rpc(
-            request,
-            retry=retry,
-            timeout=timeout,
-            metadata=metadata,
-        )
+        response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,)

         # This method is paged; wrap the response in a pager, which provides
         # an `__iter__` convenience method.
         response = pagers.ListTrainingPipelinesPager(
-            method=rpc,
-            request=request,
-            response=response,
-            metadata=metadata,
+            method=rpc, request=request, response=response, metadata=metadata,
         )

         # Done; return the response.
@@ -768,12 +713,7 @@ def delete_training_pipeline(
         )

         # Send the request.
-        response = rpc(
-            request,
-            retry=retry,
-            timeout=timeout,
-            metadata=metadata,
-        )
+        response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,)

         # Wrap the response in an operation future.
         response = ga_operation.from_gapic(
@@ -863,10 +803,7 @@ def cancel_training_pipeline(

         # Send the request.
         rpc(
-            request,
-            retry=retry,
-            timeout=timeout,
-            metadata=metadata,
+            request, retry=retry, timeout=timeout, metadata=metadata,
         )
diff --git a/google/cloud/aiplatform_v1beta1/services/prediction_service/async_client.py b/google/cloud/aiplatform_v1beta1/services/prediction_service/async_client.py
index 4545ad95e1..bb58b0bfac 100644
--- a/google/cloud/aiplatform_v1beta1/services/prediction_service/async_client.py
+++ b/google/cloud/aiplatform_v1beta1/services/prediction_service/async_client.py
@@ -241,12 +241,7 @@ async def predict(
         )

         # Send the request.
-        response = await rpc(
-            request,
-            retry=retry,
-            timeout=timeout,
-            metadata=metadata,
-        )
+        response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,)

         # Done; return the response.
         return response
@@ -372,12 +367,7 @@ async def explain(
         )

         # Send the request.
-        response = await rpc(
-            request,
-            retry=retry,
-            timeout=timeout,
-            metadata=metadata,
-        )
+        response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,)

         # Done; return the response.
         return response
diff --git a/google/cloud/aiplatform_v1beta1/services/prediction_service/client.py b/google/cloud/aiplatform_v1beta1/services/prediction_service/client.py
index 0a01fe3aae..9a5976d697 100644
--- a/google/cloud/aiplatform_v1beta1/services/prediction_service/client.py
+++ b/google/cloud/aiplatform_v1beta1/services/prediction_service/client.py
@@ -56,8 +56,7 @@ class PredictionServiceClientMeta(type):
     _transport_registry["grpc_asyncio"] = PredictionServiceGrpcAsyncIOTransport

     def get_transport_class(
-        cls,
-        label: str = None,
+        cls, label: str = None,
     ) -> Type[PredictionServiceTransport]:
         """Return an appropriate transport class.

@@ -144,16 +143,10 @@ def transport(self) -> PredictionServiceTransport:
         return self._transport

     @staticmethod
-    def endpoint_path(
-        project: str,
-        location: str,
-        endpoint: str,
-    ) -> str:
+    def endpoint_path(project: str, location: str, endpoint: str,) -> str:
         """Return a fully-qualified endpoint string."""
         return "projects/{project}/locations/{location}/endpoints/{endpoint}".format(
-            project=project,
-            location=location,
-            endpoint=endpoint,
+            project=project, location=location, endpoint=endpoint,
         )

     @staticmethod
@@ -166,9 +159,7 @@ def parse_endpoint_path(path: str) -> Dict[str, str]:
         return m.groupdict() if m else {}

     @staticmethod
-    def common_billing_account_path(
-        billing_account: str,
-    ) -> str:
+    def common_billing_account_path(billing_account: str,) -> str:
         """Return a fully-qualified billing_account string."""
         return "billingAccounts/{billing_account}".format(
             billing_account=billing_account,
@@ -181,13 +172,9 @@ def parse_common_billing_account_path(path: str) -> Dict[str, str]:
         return m.groupdict() if m else {}

     @staticmethod
-    def common_folder_path(
-        folder: str,
-    ) -> str:
+    def common_folder_path(folder: str,) -> str:
         """Return a fully-qualified folder string."""
-        return "folders/{folder}".format(
-            folder=folder,
-        )
+        return "folders/{folder}".format(folder=folder,)

     @staticmethod
     def parse_common_folder_path(path: str) -> Dict[str, str]:
@@ -196,13 +183,9 @@ def parse_common_folder_path(path: str) -> Dict[str, str]:
         return m.groupdict() if m else {}

     @staticmethod
-    def common_organization_path(
-        organization: str,
-    ) -> str:
+    def common_organization_path(organization: str,) -> str:
         """Return a fully-qualified organization string."""
-        return "organizations/{organization}".format(
-            organization=organization,
-        )
+        return "organizations/{organization}".format(organization=organization,)

     @staticmethod
     def parse_common_organization_path(path: str) -> Dict[str, str]:
@@ -211,13 +194,9 @@ def parse_common_organization_path(path: str) -> Dict[str, str]:
         return m.groupdict() if m else {}

     @staticmethod
-    def common_project_path(
-        project: str,
-    ) -> str:
+    def common_project_path(project: str,) -> str:
         """Return a fully-qualified project string."""
-        return "projects/{project}".format(
-            project=project,
-        )
+        return "projects/{project}".format(project=project,)

     @staticmethod
     def parse_common_project_path(path: str) -> Dict[str, str]:
@@ -226,14 +205,10 @@ def parse_common_project_path(path: str) -> Dict[str, str]:
         return m.groupdict() if m else {}

     @staticmethod
-    def common_location_path(
-        project: str,
-        location: str,
-    ) -> str:
+    def common_location_path(project: str, location: str,) -> str:
         """Return a fully-qualified location string."""
         return "projects/{project}/locations/{location}".format(
-            project=project,
-            location=location,
+            project=project, location=location,
         )

     @staticmethod
@@ -460,12 +435,7 @@ def predict(
         )

         # Send the request.
-        response = rpc(
-            request,
-            retry=retry,
-            timeout=timeout,
-            metadata=metadata,
-        )
+        response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,)

         # Done; return the response.
         return response
@@ -592,12 +562,7 @@ def explain(
         )

         # Send the request.
-        response = rpc(
-            request,
-            retry=retry,
-            timeout=timeout,
-            metadata=metadata,
-        )
+        response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,)

         # Done; return the response.
         return response
diff --git a/google/cloud/aiplatform_v1beta1/services/prediction_service/transports/base.py b/google/cloud/aiplatform_v1beta1/services/prediction_service/transports/base.py
index 739153f493..f2f7a028cc 100644
--- a/google/cloud/aiplatform_v1beta1/services/prediction_service/transports/base.py
+++ b/google/cloud/aiplatform_v1beta1/services/prediction_service/transports/base.py
@@ -107,14 +107,10 @@ def _prep_wrapped_messages(self, client_info):
         # Precompute the wrapped methods.
         self._wrapped_methods = {
             self.predict: gapic_v1.method.wrap_method(
-                self.predict,
-                default_timeout=5.0,
-                client_info=client_info,
+                self.predict, default_timeout=5.0, client_info=client_info,
             ),
             self.explain: gapic_v1.method.wrap_method(
-                self.explain,
-                default_timeout=5.0,
-                client_info=client_info,
+                self.explain, default_timeout=5.0, client_info=client_info,
             ),
         }
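`predict` and `explain` are the two unary calls reformatted here. Instances are `google.protobuf.Value` payloads whose shape is dictated by the deployed model's instance schema. A hedged sketch with placeholder resource IDs and a made-up payload:

    from google.cloud.aiplatform_v1beta1 import PredictionServiceClient
    from google.protobuf import json_format, struct_pb2

    client = PredictionServiceClient()
    endpoint = client.endpoint_path("my-project", "us-central1", "1234567890")

    # Hypothetical instance; a real payload must match the model's schema.
    instance = json_format.ParseDict({"values": [1.0, 2.0, 3.0]}, struct_pb2.Value())

    response = client.predict(endpoint=endpoint, instances=[instance])
    for prediction in response.predictions:
        print(prediction)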
diff --git a/google/cloud/aiplatform_v1beta1/services/specialist_pool_service/async_client.py b/google/cloud/aiplatform_v1beta1/services/specialist_pool_service/async_client.py
index d27ca66fe1..c693126d4c 100644
--- a/google/cloud/aiplatform_v1beta1/services/specialist_pool_service/async_client.py
+++ b/google/cloud/aiplatform_v1beta1/services/specialist_pool_service/async_client.py
@@ -247,12 +247,7 @@ async def create_specialist_pool(
         )

         # Send the request.
-        response = await rpc(
-            request,
-            retry=retry,
-            timeout=timeout,
-            metadata=metadata,
-        )
+        response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,)

         # Wrap the response in an operation future.
         response = operation_async.from_gapic(
@@ -343,12 +338,7 @@ async def get_specialist_pool(
         )

         # Send the request.
-        response = await rpc(
-            request,
-            retry=retry,
-            timeout=timeout,
-            metadata=metadata,
-        )
+        response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,)

         # Done; return the response.
         return response
@@ -424,20 +414,12 @@ async def list_specialist_pools(
         )

         # Send the request.
-        response = await rpc(
-            request,
-            retry=retry,
-            timeout=timeout,
-            metadata=metadata,
-        )
+        response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,)

         # This method is paged; wrap the response in a pager, which provides
         # an `__aiter__` convenience method.
         response = pagers.ListSpecialistPoolsAsyncPager(
-            method=rpc,
-            request=request,
-            response=response,
-            metadata=metadata,
+            method=rpc, request=request, response=response, metadata=metadata,
         )

         # Done; return the response.
@@ -527,12 +509,7 @@ async def delete_specialist_pool(
         )

         # Send the request.
-        response = await rpc(
-            request,
-            retry=retry,
-            timeout=timeout,
-            metadata=metadata,
-        )
+        response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,)

         # Wrap the response in an operation future.
         response = operation_async.from_gapic(
@@ -633,12 +610,7 @@ async def update_specialist_pool(
         )

         # Send the request.
-        response = await rpc(
-            request,
-            retry=retry,
-            timeout=timeout,
-            metadata=metadata,
-        )
+        response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,)

         # Wrap the response in an operation future.
         response = operation_async.from_gapic(
diff --git a/google/cloud/aiplatform_v1beta1/services/specialist_pool_service/client.py b/google/cloud/aiplatform_v1beta1/services/specialist_pool_service/client.py
index 58a55cd7f7..efc19eca12 100644
--- a/google/cloud/aiplatform_v1beta1/services/specialist_pool_service/client.py
+++ b/google/cloud/aiplatform_v1beta1/services/specialist_pool_service/client.py
@@ -62,8 +62,7 @@ class SpecialistPoolServiceClientMeta(type):
     _transport_registry["grpc_asyncio"] = SpecialistPoolServiceGrpcAsyncIOTransport

     def get_transport_class(
-        cls,
-        label: str = None,
+        cls, label: str = None,
     ) -> Type[SpecialistPoolServiceTransport]:
         """Return an appropriate transport class.
@@ -156,16 +155,10 @@ def transport(self) -> SpecialistPoolServiceTransport:
         return self._transport

     @staticmethod
-    def specialist_pool_path(
-        project: str,
-        location: str,
-        specialist_pool: str,
-    ) -> str:
+    def specialist_pool_path(project: str, location: str, specialist_pool: str,) -> str:
         """Return a fully-qualified specialist_pool string."""
         return "projects/{project}/locations/{location}/specialistPools/{specialist_pool}".format(
-            project=project,
-            location=location,
-            specialist_pool=specialist_pool,
+            project=project, location=location, specialist_pool=specialist_pool,
         )

     @staticmethod
@@ -178,9 +171,7 @@ def parse_specialist_pool_path(path: str) -> Dict[str, str]:
         return m.groupdict() if m else {}

     @staticmethod
-    def common_billing_account_path(
-        billing_account: str,
-    ) -> str:
+    def common_billing_account_path(billing_account: str,) -> str:
         """Return a fully-qualified billing_account string."""
         return "billingAccounts/{billing_account}".format(
             billing_account=billing_account,
@@ -193,13 +184,9 @@ def parse_common_billing_account_path(path: str) -> Dict[str, str]:
         return m.groupdict() if m else {}

     @staticmethod
-    def common_folder_path(
-        folder: str,
-    ) -> str:
+    def common_folder_path(folder: str,) -> str:
         """Return a fully-qualified folder string."""
-        return "folders/{folder}".format(
-            folder=folder,
-        )
+        return "folders/{folder}".format(folder=folder,)

     @staticmethod
     def parse_common_folder_path(path: str) -> Dict[str, str]:
@@ -208,13 +195,9 @@ def parse_common_folder_path(path: str) -> Dict[str, str]:
         return m.groupdict() if m else {}

     @staticmethod
-    def common_organization_path(
-        organization: str,
-    ) -> str:
+    def common_organization_path(organization: str,) -> str:
         """Return a fully-qualified organization string."""
-        return "organizations/{organization}".format(
-            organization=organization,
-        )
+        return "organizations/{organization}".format(organization=organization,)

     @staticmethod
     def parse_common_organization_path(path: str) -> Dict[str, str]:
@@ -223,13 +206,9 @@ def parse_common_organization_path(path: str) -> Dict[str, str]:
         return m.groupdict() if m else {}

     @staticmethod
-    def common_project_path(
-        project: str,
-    ) -> str:
+    def common_project_path(project: str,) -> str:
         """Return a fully-qualified project string."""
-        return "projects/{project}".format(
-            project=project,
-        )
+        return "projects/{project}".format(project=project,)

     @staticmethod
     def parse_common_project_path(path: str) -> Dict[str, str]:
@@ -238,14 +217,10 @@ def parse_common_project_path(path: str) -> Dict[str, str]:
         return m.groupdict() if m else {}

     @staticmethod
-    def common_location_path(
-        project: str,
-        location: str,
-    ) -> str:
+    def common_location_path(project: str, location: str,) -> str:
         """Return a fully-qualified location string."""
         return "projects/{project}/locations/{location}".format(
-            project=project,
-            location=location,
+            project=project, location=location,
         )

     @staticmethod
@@ -459,12 +434,7 @@ def create_specialist_pool(
         )

         # Send the request.
-        response = rpc(
-            request,
-            retry=retry,
-            timeout=timeout,
-            metadata=metadata,
-        )
+        response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,)

         # Wrap the response in an operation future.
         response = ga_operation.from_gapic(
@@ -556,12 +526,7 @@ def get_specialist_pool(
         )

         # Send the request.
-        response = rpc(
-            request,
-            retry=retry,
-            timeout=timeout,
-            metadata=metadata,
-        )
+        response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,)

         # Done; return the response.
         return response
@@ -638,20 +603,12 @@ def list_specialist_pools(
         )

         # Send the request.
-        response = rpc(
-            request,
-            retry=retry,
-            timeout=timeout,
-            metadata=metadata,
-        )
+        response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,)

         # This method is paged; wrap the response in a pager, which provides
         # an `__iter__` convenience method.
         response = pagers.ListSpecialistPoolsPager(
-            method=rpc,
-            request=request,
-            response=response,
-            metadata=metadata,
+            method=rpc, request=request, response=response, metadata=metadata,
         )

         # Done; return the response.
@@ -742,12 +699,7 @@ def delete_specialist_pool(
         )

         # Send the request.
-        response = rpc(
-            request,
-            retry=retry,
-            timeout=timeout,
-            metadata=metadata,
-        )
+        response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,)

         # Wrap the response in an operation future.
         response = ga_operation.from_gapic(
@@ -849,12 +801,7 @@ def update_specialist_pool(
         )

         # Send the request.
-        response = rpc(
-            request,
-            retry=retry,
-            timeout=timeout,
-            metadata=metadata,
-        )
+        response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,)

         # Wrap the response in an operation future.
         response = ga_operation.from_gapic(
diff --git a/google/cloud/aiplatform_v1beta1/services/specialist_pool_service/transports/base.py b/google/cloud/aiplatform_v1beta1/services/specialist_pool_service/transports/base.py
index e4de291be3..a39c2f1f71 100644
--- a/google/cloud/aiplatform_v1beta1/services/specialist_pool_service/transports/base.py
+++ b/google/cloud/aiplatform_v1beta1/services/specialist_pool_service/transports/base.py
@@ -115,9 +115,7 @@ def _prep_wrapped_messages(self, client_info):
                 client_info=client_info,
             ),
             self.get_specialist_pool: gapic_v1.method.wrap_method(
-                self.get_specialist_pool,
-                default_timeout=5.0,
-                client_info=client_info,
+                self.get_specialist_pool, default_timeout=5.0, client_info=client_info,
             ),
             self.list_specialist_pools: gapic_v1.method.wrap_method(
                 self.list_specialist_pools,
diff --git a/google/cloud/aiplatform_v1beta1/types/accelerator_type.py b/google/cloud/aiplatform_v1beta1/types/accelerator_type.py
index 23be882aaa..337b0eeaf5 100644
--- a/google/cloud/aiplatform_v1beta1/types/accelerator_type.py
+++ b/google/cloud/aiplatform_v1beta1/types/accelerator_type.py
@@ -19,10 +19,7 @@


 __protobuf__ = proto.module(
-    package="google.cloud.aiplatform.v1beta1",
-    manifest={
-        "AcceleratorType",
-    },
+    package="google.cloud.aiplatform.v1beta1", manifest={"AcceleratorType",},
 )
diff --git a/google/cloud/aiplatform_v1beta1/types/annotation.py b/google/cloud/aiplatform_v1beta1/types/annotation.py
index 93bd0481b1..7734fcc512 100644
--- a/google/cloud/aiplatform_v1beta1/types/annotation.py
+++ b/google/cloud/aiplatform_v1beta1/types/annotation.py
@@ -24,10 +24,7 @@


 __protobuf__ = proto.module(
-    package="google.cloud.aiplatform.v1beta1",
-    manifest={
-        "Annotation",
-    },
+    package="google.cloud.aiplatform.v1beta1", manifest={"Annotation",},
 )
@@ -94,30 +91,16 @@ class Annotation(proto.Message):

     payload_schema_uri = proto.Field(proto.STRING, number=2)

-    payload = proto.Field(
-        proto.MESSAGE,
-        number=3,
-        message=struct.Value,
-    )
+    payload = proto.Field(proto.MESSAGE, number=3, message=struct.Value,)

-    create_time = proto.Field(
-        proto.MESSAGE,
-        number=4,
-        message=timestamp.Timestamp,
-    )
+    create_time = proto.Field(proto.MESSAGE, number=4, message=timestamp.Timestamp,)

-    update_time = proto.Field(
-        proto.MESSAGE,
-        number=7,
-        message=timestamp.Timestamp,
-    )
+    update_time = proto.Field(proto.MESSAGE, number=7, message=timestamp.Timestamp,)

     etag = proto.Field(proto.STRING, number=8)

     annotation_source = proto.Field(
-        proto.MESSAGE,
-        number=5,
-        message=user_action_reference.UserActionReference,
+        proto.MESSAGE, number=5, message=user_action_reference.UserActionReference,
     )

     labels = proto.MapField(proto.STRING, proto.STRING, number=6)
diff --git a/google/cloud/aiplatform_v1beta1/types/annotation_spec.py b/google/cloud/aiplatform_v1beta1/types/annotation_spec.py
index 2d6e16e44f..a5a4b3d489 100644
--- a/google/cloud/aiplatform_v1beta1/types/annotation_spec.py
+++ b/google/cloud/aiplatform_v1beta1/types/annotation_spec.py
@@ -22,10 +22,7 @@


 __protobuf__ = proto.module(
-    package="google.cloud.aiplatform.v1beta1",
-    manifest={
-        "AnnotationSpec",
-    },
+    package="google.cloud.aiplatform.v1beta1", manifest={"AnnotationSpec",},
 )
@@ -58,17 +55,9 @@ class AnnotationSpec(proto.Message):

     display_name = proto.Field(proto.STRING, number=2)

-    create_time = proto.Field(
-        proto.MESSAGE,
-        number=3,
-        message=timestamp.Timestamp,
-    )
-
-    update_time = proto.Field(
-        proto.MESSAGE,
-        number=4,
-        message=timestamp.Timestamp,
-    )
+    create_time = proto.Field(proto.MESSAGE, number=3, message=timestamp.Timestamp,)
+
+    update_time = proto.Field(proto.MESSAGE, number=4, message=timestamp.Timestamp,)

     etag = proto.Field(proto.STRING, number=5)
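All of these `types/` hunks follow from two idioms: a module-level `proto.module(...)` manifest and `proto.Field(...)` descriptors on `proto.Message` subclasses. A self-contained toy module in the same style (the package name and message are invented for illustration):

    import proto
    from google.protobuf import timestamp_pb2

    __protobuf__ = proto.module(
        package="example.hypothetical.v1", manifest={"Sticker",},
    )


    class Sticker(proto.Message):
        """Toy message mirroring the field style used in these type modules."""

        name = proto.Field(proto.STRING, number=1)

        create_time = proto.Field(proto.MESSAGE, number=2, message=timestamp_pb2.Timestamp,)


    s = Sticker(name="hello")
    print(Sticker.to_json(s))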
diff --git a/google/cloud/aiplatform_v1beta1/types/batch_prediction_job.py b/google/cloud/aiplatform_v1beta1/types/batch_prediction_job.py
index 3d7501f3ce..625bf83155 100644
--- a/google/cloud/aiplatform_v1beta1/types/batch_prediction_job.py
+++ b/google/cloud/aiplatform_v1beta1/types/batch_prediction_job.py
@@ -34,10 +34,7 @@


 __protobuf__ = proto.module(
-    package="google.cloud.aiplatform.v1beta1",
-    manifest={
-        "BatchPredictionJob",
-    },
+    package="google.cloud.aiplatform.v1beta1", manifest={"BatchPredictionJob",},
 )
@@ -214,17 +211,11 @@ class InputConfig(proto.Message):
         """

         gcs_source = proto.Field(
-            proto.MESSAGE,
-            number=2,
-            oneof="source",
-            message=io.GcsSource,
+            proto.MESSAGE, number=2, oneof="source", message=io.GcsSource,
         )

         bigquery_source = proto.Field(
-            proto.MESSAGE,
-            number=3,
-            oneof="source",
-            message=io.BigQuerySource,
+            proto.MESSAGE, number=3, oneof="source", message=io.BigQuerySource,
         )

         instances_format = proto.Field(proto.STRING, number=1)
@@ -296,10 +287,7 @@ class OutputConfig(proto.Message):
         """

         gcs_destination = proto.Field(
-            proto.MESSAGE,
-            number=2,
-            oneof="destination",
-            message=io.GcsDestination,
+            proto.MESSAGE, number=2, oneof="destination", message=io.GcsDestination,
         )

         bigquery_destination = proto.Field(
@@ -340,28 +328,14 @@ class OutputInfo(proto.Message):

     model = proto.Field(proto.STRING, number=3)

-    input_config = proto.Field(
-        proto.MESSAGE,
-        number=4,
-        message=InputConfig,
-    )
+    input_config = proto.Field(proto.MESSAGE, number=4, message=InputConfig,)

-    model_parameters = proto.Field(
-        proto.MESSAGE,
-        number=5,
-        message=struct.Value,
-    )
+    model_parameters = proto.Field(proto.MESSAGE, number=5, message=struct.Value,)

-    output_config = proto.Field(
-        proto.MESSAGE,
-        number=6,
-        message=OutputConfig,
-    )
+    output_config = proto.Field(proto.MESSAGE, number=6, message=OutputConfig,)

     dedicated_resources = proto.Field(
-        proto.MESSAGE,
-        number=7,
-        message=machine_resources.BatchDedicatedResources,
+        proto.MESSAGE, number=7, message=machine_resources.BatchDedicatedResources,
     )

     manual_batch_tuning_parameters = proto.Field(
@@ -373,70 +347,34 @@ class OutputInfo(proto.Message):

     generate_explanation = proto.Field(proto.BOOL, number=23)

     explanation_spec = proto.Field(
-        proto.MESSAGE,
-        number=25,
-        message=explanation.ExplanationSpec,
+        proto.MESSAGE, number=25, message=explanation.ExplanationSpec,
     )

-    output_info = proto.Field(
-        proto.MESSAGE,
-        number=9,
-        message=OutputInfo,
-    )
+    output_info = proto.Field(proto.MESSAGE, number=9, message=OutputInfo,)

-    state = proto.Field(
-        proto.ENUM,
-        number=10,
-        enum=job_state.JobState,
-    )
+    state = proto.Field(proto.ENUM, number=10, enum=job_state.JobState,)

-    error = proto.Field(
-        proto.MESSAGE,
-        number=11,
-        message=status.Status,
-    )
+    error = proto.Field(proto.MESSAGE, number=11, message=status.Status,)

     partial_failures = proto.RepeatedField(
-        proto.MESSAGE,
-        number=12,
-        message=status.Status,
+        proto.MESSAGE, number=12, message=status.Status,
     )

     resources_consumed = proto.Field(
-        proto.MESSAGE,
-        number=13,
-        message=machine_resources.ResourcesConsumed,
+        proto.MESSAGE, number=13, message=machine_resources.ResourcesConsumed,
     )

     completion_stats = proto.Field(
-        proto.MESSAGE,
-        number=14,
-        message=gca_completion_stats.CompletionStats,
+        proto.MESSAGE, number=14, message=gca_completion_stats.CompletionStats,
     )

-    create_time = proto.Field(
-        proto.MESSAGE,
-        number=15,
-        message=timestamp.Timestamp,
-    )
+    create_time = proto.Field(proto.MESSAGE, number=15, message=timestamp.Timestamp,)

-    start_time = proto.Field(
-        proto.MESSAGE,
-        number=16,
-        message=timestamp.Timestamp,
-    )
+    start_time = proto.Field(proto.MESSAGE, number=16, message=timestamp.Timestamp,)

-    end_time = proto.Field(
-        proto.MESSAGE,
-        number=17,
-        message=timestamp.Timestamp,
-    )
+    end_time = proto.Field(proto.MESSAGE, number=17, message=timestamp.Timestamp,)

-    update_time = proto.Field(
-        proto.MESSAGE,
-        number=18,
-        message=timestamp.Timestamp,
-    )
+    update_time = proto.Field(proto.MESSAGE, number=18, message=timestamp.Timestamp,)

     labels = proto.MapField(proto.STRING, proto.STRING, number=19)
diff --git a/google/cloud/aiplatform_v1beta1/types/completion_stats.py b/google/cloud/aiplatform_v1beta1/types/completion_stats.py
index f2626b9c9b..165be59634 100644
--- a/google/cloud/aiplatform_v1beta1/types/completion_stats.py
+++ b/google/cloud/aiplatform_v1beta1/types/completion_stats.py
@@ -19,10 +19,7 @@


 __protobuf__ = proto.module(
-    package="google.cloud.aiplatform.v1beta1",
-    manifest={
-        "CompletionStats",
-    },
+    package="google.cloud.aiplatform.v1beta1", manifest={"CompletionStats",},
 )
diff --git a/google/cloud/aiplatform_v1beta1/types/custom_job.py b/google/cloud/aiplatform_v1beta1/types/custom_job.py
index d4e2a086bd..2d8745538c 100644
--- a/google/cloud/aiplatform_v1beta1/types/custom_job.py
+++ b/google/cloud/aiplatform_v1beta1/types/custom_job.py
@@ -89,47 +89,19 @@ class CustomJob(proto.Message):

     display_name = proto.Field(proto.STRING, number=2)

-    job_spec = proto.Field(
-        proto.MESSAGE,
-        number=4,
-        message="CustomJobSpec",
-    )
+    job_spec = proto.Field(proto.MESSAGE, number=4, message="CustomJobSpec",)

-    state = proto.Field(
-        proto.ENUM,
-        number=5,
-        enum=job_state.JobState,
-    )
+    state = proto.Field(proto.ENUM, number=5, enum=job_state.JobState,)

-    create_time = proto.Field(
-        proto.MESSAGE,
-        number=6,
-        message=timestamp.Timestamp,
-    )
+    create_time = proto.Field(proto.MESSAGE, number=6, message=timestamp.Timestamp,)

-    start_time = proto.Field(
-        proto.MESSAGE,
-        number=7,
-        message=timestamp.Timestamp,
-    )
+    start_time = proto.Field(proto.MESSAGE, number=7, message=timestamp.Timestamp,)

-    end_time = proto.Field(
-        proto.MESSAGE,
-        number=8,
-        message=timestamp.Timestamp,
-    )
+    end_time = proto.Field(proto.MESSAGE, number=8, message=timestamp.Timestamp,)

-    update_time = proto.Field(
-        proto.MESSAGE,
-        number=9,
-        message=timestamp.Timestamp,
-    )
+    update_time = proto.Field(proto.MESSAGE, number=9, message=timestamp.Timestamp,)

-    error = proto.Field(
-        proto.MESSAGE,
-        number=10,
-        message=status.Status,
-    )
+    error = proto.Field(proto.MESSAGE, number=10, message=status.Status,)

     labels = proto.MapField(proto.STRING, proto.STRING, number=11)
@@ -195,25 +167,17 @@ class CustomJobSpec(proto.Message):
     """

     worker_pool_specs = proto.RepeatedField(
-        proto.MESSAGE,
-        number=1,
-        message="WorkerPoolSpec",
+        proto.MESSAGE, number=1, message="WorkerPoolSpec",
     )

-    scheduling = proto.Field(
-        proto.MESSAGE,
-        number=3,
-        message="Scheduling",
-    )
+    scheduling = proto.Field(proto.MESSAGE, number=3, message="Scheduling",)

     service_account = proto.Field(proto.STRING, number=4)

     network = proto.Field(proto.STRING, number=5)

     base_output_directory = proto.Field(
-        proto.MESSAGE,
-        number=6,
-        message=io.GcsDestination,
+        proto.MESSAGE, number=6, message=io.GcsDestination,
     )
@@ -236,31 +200,21 @@ class WorkerPoolSpec(proto.Message):
     """

     container_spec = proto.Field(
-        proto.MESSAGE,
-        number=6,
-        oneof="task",
-        message="ContainerSpec",
+        proto.MESSAGE, number=6, oneof="task", message="ContainerSpec",
     )

     python_package_spec = proto.Field(
-        proto.MESSAGE,
-        number=7,
-        oneof="task",
-        message="PythonPackageSpec",
+        proto.MESSAGE, number=7, oneof="task", message="PythonPackageSpec",
     )

     machine_spec = proto.Field(
-        proto.MESSAGE,
-        number=1,
-        message=machine_resources.MachineSpec,
+        proto.MESSAGE, number=1, message=machine_resources.MachineSpec,
     )

     replica_count = proto.Field(proto.INT64, number=2)

     disk_spec = proto.Field(
-        proto.MESSAGE,
-        number=5,
-        message=machine_resources.DiskSpec,
+        proto.MESSAGE, number=5, message=machine_resources.DiskSpec,
     )
@@ -336,11 +290,7 @@ class Scheduling(proto.Message):
             to workers leaving and joining a job.
""" - timeout = proto.Field( - proto.MESSAGE, - number=1, - message=duration.Duration, - ) + timeout = proto.Field(proto.MESSAGE, number=1, message=duration.Duration,) restart_job_on_worker_restart = proto.Field(proto.BOOL, number=3) diff --git a/google/cloud/aiplatform_v1beta1/types/data_item.py b/google/cloud/aiplatform_v1beta1/types/data_item.py index 8ef4b9c8c6..e43a944d94 100644 --- a/google/cloud/aiplatform_v1beta1/types/data_item.py +++ b/google/cloud/aiplatform_v1beta1/types/data_item.py @@ -23,10 +23,7 @@ __protobuf__ = proto.module( - package="google.cloud.aiplatform.v1beta1", - manifest={ - "DataItem", - }, + package="google.cloud.aiplatform.v1beta1", manifest={"DataItem",}, ) @@ -73,25 +70,13 @@ class DataItem(proto.Message): name = proto.Field(proto.STRING, number=1) - create_time = proto.Field( - proto.MESSAGE, - number=2, - message=timestamp.Timestamp, - ) + create_time = proto.Field(proto.MESSAGE, number=2, message=timestamp.Timestamp,) - update_time = proto.Field( - proto.MESSAGE, - number=6, - message=timestamp.Timestamp, - ) + update_time = proto.Field(proto.MESSAGE, number=6, message=timestamp.Timestamp,) labels = proto.MapField(proto.STRING, proto.STRING, number=3) - payload = proto.Field( - proto.MESSAGE, - number=4, - message=struct.Value, - ) + payload = proto.Field(proto.MESSAGE, number=4, message=struct.Value,) etag = proto.Field(proto.STRING, number=7) diff --git a/google/cloud/aiplatform_v1beta1/types/data_labeling_job.py b/google/cloud/aiplatform_v1beta1/types/data_labeling_job.py index c1542d0661..af1bcdd871 100644 --- a/google/cloud/aiplatform_v1beta1/types/data_labeling_job.py +++ b/google/cloud/aiplatform_v1beta1/types/data_labeling_job.py @@ -146,52 +146,26 @@ class DataLabelingJob(proto.Message): inputs_schema_uri = proto.Field(proto.STRING, number=6) - inputs = proto.Field( - proto.MESSAGE, - number=7, - message=struct.Value, - ) + inputs = proto.Field(proto.MESSAGE, number=7, message=struct.Value,) - state = proto.Field( - proto.ENUM, - number=8, - enum=job_state.JobState, - ) + state = proto.Field(proto.ENUM, number=8, enum=job_state.JobState,) labeling_progress = proto.Field(proto.INT32, number=13) - current_spend = proto.Field( - proto.MESSAGE, - number=14, - message=money.Money, - ) + current_spend = proto.Field(proto.MESSAGE, number=14, message=money.Money,) - create_time = proto.Field( - proto.MESSAGE, - number=9, - message=timestamp.Timestamp, - ) + create_time = proto.Field(proto.MESSAGE, number=9, message=timestamp.Timestamp,) - update_time = proto.Field( - proto.MESSAGE, - number=10, - message=timestamp.Timestamp, - ) + update_time = proto.Field(proto.MESSAGE, number=10, message=timestamp.Timestamp,) - error = proto.Field( - proto.MESSAGE, - number=22, - message=status.Status, - ) + error = proto.Field(proto.MESSAGE, number=22, message=status.Status,) labels = proto.MapField(proto.STRING, proto.STRING, number=11) specialist_pools = proto.RepeatedField(proto.STRING, number=16) active_learning_config = proto.Field( - proto.MESSAGE, - number=21, - message="ActiveLearningConfig", + proto.MESSAGE, number=21, message="ActiveLearningConfig", ) @@ -228,17 +202,9 @@ class ActiveLearningConfig(proto.Message): proto.INT32, number=2, oneof="human_labeling_budget" ) - sample_config = proto.Field( - proto.MESSAGE, - number=3, - message="SampleConfig", - ) + sample_config = proto.Field(proto.MESSAGE, number=3, message="SampleConfig",) - training_config = proto.Field( - proto.MESSAGE, - number=4, - message="TrainingConfig", - ) + training_config = 
proto.Field(proto.MESSAGE, number=4, message="TrainingConfig",) class SampleConfig(proto.Message): @@ -275,11 +241,7 @@ class SampleStrategy(proto.Enum): proto.INT32, number=3, oneof="following_batch_sample_size" ) - sample_strategy = proto.Field( - proto.ENUM, - number=5, - enum=SampleStrategy, - ) + sample_strategy = proto.Field(proto.ENUM, number=5, enum=SampleStrategy,) class TrainingConfig(proto.Message): diff --git a/google/cloud/aiplatform_v1beta1/types/dataset.py b/google/cloud/aiplatform_v1beta1/types/dataset.py index 8b93a04e1b..76f6462f40 100644 --- a/google/cloud/aiplatform_v1beta1/types/dataset.py +++ b/google/cloud/aiplatform_v1beta1/types/dataset.py @@ -25,11 +25,7 @@ __protobuf__ = proto.module( package="google.cloud.aiplatform.v1beta1", - manifest={ - "Dataset", - "ImportDataConfig", - "ExportDataConfig", - }, + manifest={"Dataset", "ImportDataConfig", "ExportDataConfig",}, ) @@ -92,23 +88,11 @@ class Dataset(proto.Message): metadata_schema_uri = proto.Field(proto.STRING, number=3) - metadata = proto.Field( - proto.MESSAGE, - number=8, - message=struct.Value, - ) + metadata = proto.Field(proto.MESSAGE, number=8, message=struct.Value,) - create_time = proto.Field( - proto.MESSAGE, - number=4, - message=timestamp.Timestamp, - ) + create_time = proto.Field(proto.MESSAGE, number=4, message=timestamp.Timestamp,) - update_time = proto.Field( - proto.MESSAGE, - number=5, - message=timestamp.Timestamp, - ) + update_time = proto.Field(proto.MESSAGE, number=5, message=timestamp.Timestamp,) etag = proto.Field(proto.STRING, number=6) @@ -148,10 +132,7 @@ class ImportDataConfig(proto.Message): """ gcs_source = proto.Field( - proto.MESSAGE, - number=1, - oneof="source", - message=io.GcsSource, + proto.MESSAGE, number=1, oneof="source", message=io.GcsSource, ) data_item_labels = proto.MapField(proto.STRING, proto.STRING, number=2) @@ -185,10 +166,7 @@ class ExportDataConfig(proto.Message): """ gcs_destination = proto.Field( - proto.MESSAGE, - number=1, - oneof="destination", - message=io.GcsDestination, + proto.MESSAGE, number=1, oneof="destination", message=io.GcsDestination, ) annotations_filter = proto.Field(proto.STRING, number=2) diff --git a/google/cloud/aiplatform_v1beta1/types/dataset_service.py b/google/cloud/aiplatform_v1beta1/types/dataset_service.py index aebd5ebb31..7160b7b52f 100644 --- a/google/cloud/aiplatform_v1beta1/types/dataset_service.py +++ b/google/cloud/aiplatform_v1beta1/types/dataset_service.py @@ -65,11 +65,7 @@ class CreateDatasetRequest(proto.Message): parent = proto.Field(proto.STRING, number=1) - dataset = proto.Field( - proto.MESSAGE, - number=2, - message=gca_dataset.Dataset, - ) + dataset = proto.Field(proto.MESSAGE, number=2, message=gca_dataset.Dataset,) class CreateDatasetOperationMetadata(proto.Message): @@ -82,9 +78,7 @@ class CreateDatasetOperationMetadata(proto.Message): """ generic_metadata = proto.Field( - proto.MESSAGE, - number=1, - message=operation.GenericOperationMetadata, + proto.MESSAGE, number=1, message=operation.GenericOperationMetadata, ) @@ -101,11 +95,7 @@ class GetDatasetRequest(proto.Message): name = proto.Field(proto.STRING, number=1) - read_mask = proto.Field( - proto.MESSAGE, - number=2, - message=field_mask.FieldMask, - ) + read_mask = proto.Field(proto.MESSAGE, number=2, message=field_mask.FieldMask,) class UpdateDatasetRequest(proto.Message): @@ -128,17 +118,9 @@ class UpdateDatasetRequest(proto.Message): - ``labels`` """ - dataset = proto.Field( - proto.MESSAGE, - number=1, - message=gca_dataset.Dataset, - ) + dataset = 
proto.Field(proto.MESSAGE, number=1, message=gca_dataset.Dataset,) - update_mask = proto.Field( - proto.MESSAGE, - number=2, - message=field_mask.FieldMask, - ) + update_mask = proto.Field(proto.MESSAGE, number=2, message=field_mask.FieldMask,) class ListDatasetsRequest(proto.Message): @@ -175,11 +157,7 @@ class ListDatasetsRequest(proto.Message): page_token = proto.Field(proto.STRING, number=4) - read_mask = proto.Field( - proto.MESSAGE, - number=5, - message=field_mask.FieldMask, - ) + read_mask = proto.Field(proto.MESSAGE, number=5, message=field_mask.FieldMask,) order_by = proto.Field(proto.STRING, number=6) @@ -201,9 +179,7 @@ def raw_page(self): return self datasets = proto.RepeatedField( - proto.MESSAGE, - number=1, - message=gca_dataset.Dataset, + proto.MESSAGE, number=1, message=gca_dataset.Dataset, ) next_page_token = proto.Field(proto.STRING, number=2) @@ -240,9 +216,7 @@ class ImportDataRequest(proto.Message): name = proto.Field(proto.STRING, number=1) import_configs = proto.RepeatedField( - proto.MESSAGE, - number=2, - message=gca_dataset.ImportDataConfig, + proto.MESSAGE, number=2, message=gca_dataset.ImportDataConfig, ) @@ -262,9 +236,7 @@ class ImportDataOperationMetadata(proto.Message): """ generic_metadata = proto.Field( - proto.MESSAGE, - number=1, - message=operation.GenericOperationMetadata, + proto.MESSAGE, number=1, message=operation.GenericOperationMetadata, ) @@ -283,9 +255,7 @@ class ExportDataRequest(proto.Message): name = proto.Field(proto.STRING, number=1) export_config = proto.Field( - proto.MESSAGE, - number=2, - message=gca_dataset.ExportDataConfig, + proto.MESSAGE, number=2, message=gca_dataset.ExportDataConfig, ) @@ -316,9 +286,7 @@ class ExportDataOperationMetadata(proto.Message): """ generic_metadata = proto.Field( - proto.MESSAGE, - number=1, - message=operation.GenericOperationMetadata, + proto.MESSAGE, number=1, message=operation.GenericOperationMetadata, ) gcs_output_directory = proto.Field(proto.STRING, number=2) @@ -355,11 +323,7 @@ class ListDataItemsRequest(proto.Message): page_token = proto.Field(proto.STRING, number=4) - read_mask = proto.Field( - proto.MESSAGE, - number=5, - message=field_mask.FieldMask, - ) + read_mask = proto.Field(proto.MESSAGE, number=5, message=field_mask.FieldMask,) order_by = proto.Field(proto.STRING, number=6) @@ -381,9 +345,7 @@ def raw_page(self): return self data_items = proto.RepeatedField( - proto.MESSAGE, - number=1, - message=data_item.DataItem, + proto.MESSAGE, number=1, message=data_item.DataItem, ) next_page_token = proto.Field(proto.STRING, number=2) @@ -404,11 +366,7 @@ class GetAnnotationSpecRequest(proto.Message): name = proto.Field(proto.STRING, number=1) - read_mask = proto.Field( - proto.MESSAGE, - number=2, - message=field_mask.FieldMask, - ) + read_mask = proto.Field(proto.MESSAGE, number=2, message=field_mask.FieldMask,) class ListAnnotationsRequest(proto.Message): @@ -443,11 +401,7 @@ class ListAnnotationsRequest(proto.Message): page_token = proto.Field(proto.STRING, number=4) - read_mask = proto.Field( - proto.MESSAGE, - number=5, - message=field_mask.FieldMask, - ) + read_mask = proto.Field(proto.MESSAGE, number=5, message=field_mask.FieldMask,) order_by = proto.Field(proto.STRING, number=6) @@ -469,9 +423,7 @@ def raw_page(self): return self annotations = proto.RepeatedField( - proto.MESSAGE, - number=1, - message=annotation.Annotation, + proto.MESSAGE, number=1, message=annotation.Annotation, ) next_page_token = proto.Field(proto.STRING, number=2) diff --git 
a/google/cloud/aiplatform_v1beta1/types/deployed_model_ref.py b/google/cloud/aiplatform_v1beta1/types/deployed_model_ref.py index f94dc7793a..b0ec7010a2 100644 --- a/google/cloud/aiplatform_v1beta1/types/deployed_model_ref.py +++ b/google/cloud/aiplatform_v1beta1/types/deployed_model_ref.py @@ -19,10 +19,7 @@ __protobuf__ = proto.module( - package="google.cloud.aiplatform.v1beta1", - manifest={ - "DeployedModelRef", - }, + package="google.cloud.aiplatform.v1beta1", manifest={"DeployedModelRef",}, ) diff --git a/google/cloud/aiplatform_v1beta1/types/endpoint.py b/google/cloud/aiplatform_v1beta1/types/endpoint.py index 326ca3c35c..f1ba6ed85d 100644 --- a/google/cloud/aiplatform_v1beta1/types/endpoint.py +++ b/google/cloud/aiplatform_v1beta1/types/endpoint.py @@ -24,11 +24,7 @@ __protobuf__ = proto.module( - package="google.cloud.aiplatform.v1beta1", - manifest={ - "Endpoint", - "DeployedModel", - }, + package="google.cloud.aiplatform.v1beta1", manifest={"Endpoint", "DeployedModel",}, ) @@ -92,9 +88,7 @@ class Endpoint(proto.Message): description = proto.Field(proto.STRING, number=3) deployed_models = proto.RepeatedField( - proto.MESSAGE, - number=4, - message="DeployedModel", + proto.MESSAGE, number=4, message="DeployedModel", ) traffic_split = proto.MapField(proto.STRING, proto.INT32, number=5) @@ -103,17 +97,9 @@ class Endpoint(proto.Message): labels = proto.MapField(proto.STRING, proto.STRING, number=7) - create_time = proto.Field( - proto.MESSAGE, - number=8, - message=timestamp.Timestamp, - ) + create_time = proto.Field(proto.MESSAGE, number=8, message=timestamp.Timestamp,) - update_time = proto.Field( - proto.MESSAGE, - number=9, - message=timestamp.Timestamp, - ) + update_time = proto.Field(proto.MESSAGE, number=9, message=timestamp.Timestamp,) class DeployedModel(proto.Message): @@ -207,16 +193,10 @@ class DeployedModel(proto.Message): display_name = proto.Field(proto.STRING, number=3) - create_time = proto.Field( - proto.MESSAGE, - number=6, - message=timestamp.Timestamp, - ) + create_time = proto.Field(proto.MESSAGE, number=6, message=timestamp.Timestamp,) explanation_spec = proto.Field( - proto.MESSAGE, - number=9, - message=explanation.ExplanationSpec, + proto.MESSAGE, number=9, message=explanation.ExplanationSpec, ) service_account = proto.Field(proto.STRING, number=11) diff --git a/google/cloud/aiplatform_v1beta1/types/endpoint_service.py b/google/cloud/aiplatform_v1beta1/types/endpoint_service.py index 659268cd22..4bc9f35594 100644 --- a/google/cloud/aiplatform_v1beta1/types/endpoint_service.py +++ b/google/cloud/aiplatform_v1beta1/types/endpoint_service.py @@ -58,11 +58,7 @@ class CreateEndpointRequest(proto.Message): parent = proto.Field(proto.STRING, number=1) - endpoint = proto.Field( - proto.MESSAGE, - number=2, - message=gca_endpoint.Endpoint, - ) + endpoint = proto.Field(proto.MESSAGE, number=2, message=gca_endpoint.Endpoint,) class CreateEndpointOperationMetadata(proto.Message): @@ -75,9 +71,7 @@ class CreateEndpointOperationMetadata(proto.Message): """ generic_metadata = proto.Field( - proto.MESSAGE, - number=1, - message=operation.GenericOperationMetadata, + proto.MESSAGE, number=1, message=operation.GenericOperationMetadata, ) @@ -149,11 +143,7 @@ class ListEndpointsRequest(proto.Message): page_token = proto.Field(proto.STRING, number=4) - read_mask = proto.Field( - proto.MESSAGE, - number=5, - message=field_mask.FieldMask, - ) + read_mask = proto.Field(proto.MESSAGE, number=5, message=field_mask.FieldMask,) class ListEndpointsResponse(proto.Message): @@ -174,9 
+164,7 @@ def raw_page(self): return self endpoints = proto.RepeatedField( - proto.MESSAGE, - number=1, - message=gca_endpoint.Endpoint, + proto.MESSAGE, number=1, message=gca_endpoint.Endpoint, ) next_page_token = proto.Field(proto.STRING, number=2) @@ -195,17 +183,9 @@ class UpdateEndpointRequest(proto.Message): resource. """ - endpoint = proto.Field( - proto.MESSAGE, - number=1, - message=gca_endpoint.Endpoint, - ) + endpoint = proto.Field(proto.MESSAGE, number=1, message=gca_endpoint.Endpoint,) - update_mask = proto.Field( - proto.MESSAGE, - number=2, - message=field_mask.FieldMask, - ) + update_mask = proto.Field(proto.MESSAGE, number=2, message=field_mask.FieldMask,) class DeleteEndpointRequest(proto.Message): @@ -259,9 +239,7 @@ class DeployModelRequest(proto.Message): endpoint = proto.Field(proto.STRING, number=1) deployed_model = proto.Field( - proto.MESSAGE, - number=2, - message=gca_endpoint.DeployedModel, + proto.MESSAGE, number=2, message=gca_endpoint.DeployedModel, ) traffic_split = proto.MapField(proto.STRING, proto.INT32, number=3) @@ -278,9 +256,7 @@ class DeployModelResponse(proto.Message): """ deployed_model = proto.Field( - proto.MESSAGE, - number=1, - message=gca_endpoint.DeployedModel, + proto.MESSAGE, number=1, message=gca_endpoint.DeployedModel, ) @@ -294,9 +270,7 @@ class DeployModelOperationMetadata(proto.Message): """ generic_metadata = proto.Field( - proto.MESSAGE, - number=1, - message=operation.GenericOperationMetadata, + proto.MESSAGE, number=1, message=operation.GenericOperationMetadata, ) @@ -346,9 +320,7 @@ class UndeployModelOperationMetadata(proto.Message): """ generic_metadata = proto.Field( - proto.MESSAGE, - number=1, - message=operation.GenericOperationMetadata, + proto.MESSAGE, number=1, message=operation.GenericOperationMetadata, ) diff --git a/google/cloud/aiplatform_v1beta1/types/env_var.py b/google/cloud/aiplatform_v1beta1/types/env_var.py index 74b460116d..207e8275cd 100644 --- a/google/cloud/aiplatform_v1beta1/types/env_var.py +++ b/google/cloud/aiplatform_v1beta1/types/env_var.py @@ -19,10 +19,7 @@ __protobuf__ = proto.module( - package="google.cloud.aiplatform.v1beta1", - manifest={ - "EnvVar", - }, + package="google.cloud.aiplatform.v1beta1", manifest={"EnvVar",}, ) diff --git a/google/cloud/aiplatform_v1beta1/types/explanation.py b/google/cloud/aiplatform_v1beta1/types/explanation.py index 4b7c14c490..7a495fff1e 100644 --- a/google/cloud/aiplatform_v1beta1/types/explanation.py +++ b/google/cloud/aiplatform_v1beta1/types/explanation.py @@ -71,11 +71,7 @@ class Explanation(proto.Message): in the same order as they appear in the output_indices. """ - attributions = proto.RepeatedField( - proto.MESSAGE, - number=1, - message="Attribution", - ) + attributions = proto.RepeatedField(proto.MESSAGE, number=1, message="Attribution",) class ModelExplanation(proto.Message): @@ -113,9 +109,7 @@ class ModelExplanation(proto.Message): """ mean_attributions = proto.RepeatedField( - proto.MESSAGE, - number=1, - message="Attribution", + proto.MESSAGE, number=1, message="Attribution", ) @@ -239,11 +233,7 @@ class Attribution(proto.Message): instance_output_value = proto.Field(proto.DOUBLE, number=2) - feature_attributions = proto.Field( - proto.MESSAGE, - number=3, - message=struct.Value, - ) + feature_attributions = proto.Field(proto.MESSAGE, number=3, message=struct.Value,) output_index = proto.RepeatedField(proto.INT32, number=4) @@ -266,16 +256,10 @@ class ExplanationSpec(proto.Message): input and output for explanation. 
""" - parameters = proto.Field( - proto.MESSAGE, - number=1, - message="ExplanationParameters", - ) + parameters = proto.Field(proto.MESSAGE, number=1, message="ExplanationParameters",) metadata = proto.Field( - proto.MESSAGE, - number=2, - message=explanation_metadata.ExplanationMetadata, + proto.MESSAGE, number=2, message=explanation_metadata.ExplanationMetadata, ) @@ -333,10 +317,7 @@ class ExplanationParameters(proto.Message): """ sampled_shapley_attribution = proto.Field( - proto.MESSAGE, - number=1, - oneof="method", - message="SampledShapleyAttribution", + proto.MESSAGE, number=1, oneof="method", message="SampledShapleyAttribution", ) integrated_gradients_attribution = proto.Field( @@ -347,19 +328,12 @@ class ExplanationParameters(proto.Message): ) xrai_attribution = proto.Field( - proto.MESSAGE, - number=3, - oneof="method", - message="XraiAttribution", + proto.MESSAGE, number=3, oneof="method", message="XraiAttribution", ) top_k = proto.Field(proto.INT32, number=4) - output_indices = proto.Field( - proto.MESSAGE, - number=5, - message=struct.ListValue, - ) + output_indices = proto.Field(proto.MESSAGE, number=5, message=struct.ListValue,) class SampledShapleyAttribution(proto.Message): @@ -407,9 +381,7 @@ class IntegratedGradientsAttribution(proto.Message): step_count = proto.Field(proto.INT32, number=1) smooth_grad_config = proto.Field( - proto.MESSAGE, - number=2, - message="SmoothGradConfig", + proto.MESSAGE, number=2, message="SmoothGradConfig", ) @@ -444,9 +416,7 @@ class XraiAttribution(proto.Message): step_count = proto.Field(proto.INT32, number=1) smooth_grad_config = proto.Field( - proto.MESSAGE, - number=2, - message="SmoothGradConfig", + proto.MESSAGE, number=2, message="SmoothGradConfig", ) @@ -538,9 +508,7 @@ class NoiseSigmaForFeature(proto.Message): sigma = proto.Field(proto.FLOAT, number=2) noise_sigma = proto.RepeatedField( - proto.MESSAGE, - number=1, - message=NoiseSigmaForFeature, + proto.MESSAGE, number=1, message=NoiseSigmaForFeature, ) diff --git a/google/cloud/aiplatform_v1beta1/types/explanation_metadata.py b/google/cloud/aiplatform_v1beta1/types/explanation_metadata.py index 78c46d1dd0..7261c064f8 100644 --- a/google/cloud/aiplatform_v1beta1/types/explanation_metadata.py +++ b/google/cloud/aiplatform_v1beta1/types/explanation_metadata.py @@ -22,10 +22,7 @@ __protobuf__ = proto.module( - package="google.cloud.aiplatform.v1beta1", - manifest={ - "ExplanationMetadata", - }, + package="google.cloud.aiplatform.v1beta1", manifest={"ExplanationMetadata",}, ) @@ -319,17 +316,13 @@ class OverlayType(proto.Enum): ) input_baselines = proto.RepeatedField( - proto.MESSAGE, - number=1, - message=struct.Value, + proto.MESSAGE, number=1, message=struct.Value, ) input_tensor_name = proto.Field(proto.STRING, number=2) encoding = proto.Field( - proto.ENUM, - number=3, - enum="ExplanationMetadata.InputMetadata.Encoding", + proto.ENUM, number=3, enum="ExplanationMetadata.InputMetadata.Encoding", ) modality = proto.Field(proto.STRING, number=4) @@ -349,9 +342,7 @@ class OverlayType(proto.Enum): encoded_tensor_name = proto.Field(proto.STRING, number=9) encoded_baselines = proto.RepeatedField( - proto.MESSAGE, - number=10, - message=struct.Value, + proto.MESSAGE, number=10, message=struct.Value, ) visualization = proto.Field( @@ -400,10 +391,7 @@ class OutputMetadata(proto.Message): """ index_display_name_mapping = proto.Field( - proto.MESSAGE, - number=1, - oneof="display_name_mapping", - message=struct.Value, + proto.MESSAGE, number=1, oneof="display_name_mapping", 
message=struct.Value, ) display_name_mapping_key = proto.Field( @@ -413,17 +401,11 @@ class OutputMetadata(proto.Message): output_tensor_name = proto.Field(proto.STRING, number=3) inputs = proto.MapField( - proto.STRING, - proto.MESSAGE, - number=1, - message=InputMetadata, + proto.STRING, proto.MESSAGE, number=1, message=InputMetadata, ) outputs = proto.MapField( - proto.STRING, - proto.MESSAGE, - number=2, - message=OutputMetadata, + proto.STRING, proto.MESSAGE, number=2, message=OutputMetadata, ) feature_attributions_schema_uri = proto.Field(proto.STRING, number=3) diff --git a/google/cloud/aiplatform_v1beta1/types/hyperparameter_tuning_job.py b/google/cloud/aiplatform_v1beta1/types/hyperparameter_tuning_job.py index 186963683c..78af635e79 100644 --- a/google/cloud/aiplatform_v1beta1/types/hyperparameter_tuning_job.py +++ b/google/cloud/aiplatform_v1beta1/types/hyperparameter_tuning_job.py @@ -26,10 +26,7 @@ __protobuf__ = proto.module( - package="google.cloud.aiplatform.v1beta1", - manifest={ - "HyperparameterTuningJob", - }, + package="google.cloud.aiplatform.v1beta1", manifest={"HyperparameterTuningJob",}, ) @@ -102,11 +99,7 @@ class HyperparameterTuningJob(proto.Message): display_name = proto.Field(proto.STRING, number=2) - study_spec = proto.Field( - proto.MESSAGE, - number=4, - message=study.StudySpec, - ) + study_spec = proto.Field(proto.MESSAGE, number=4, message=study.StudySpec,) max_trial_count = proto.Field(proto.INT32, number=5) @@ -115,52 +108,22 @@ class HyperparameterTuningJob(proto.Message): max_failed_trial_count = proto.Field(proto.INT32, number=7) trial_job_spec = proto.Field( - proto.MESSAGE, - number=8, - message=custom_job.CustomJobSpec, + proto.MESSAGE, number=8, message=custom_job.CustomJobSpec, ) - trials = proto.RepeatedField( - proto.MESSAGE, - number=9, - message=study.Trial, - ) + trials = proto.RepeatedField(proto.MESSAGE, number=9, message=study.Trial,) - state = proto.Field( - proto.ENUM, - number=10, - enum=job_state.JobState, - ) + state = proto.Field(proto.ENUM, number=10, enum=job_state.JobState,) - create_time = proto.Field( - proto.MESSAGE, - number=11, - message=timestamp.Timestamp, - ) + create_time = proto.Field(proto.MESSAGE, number=11, message=timestamp.Timestamp,) - start_time = proto.Field( - proto.MESSAGE, - number=12, - message=timestamp.Timestamp, - ) + start_time = proto.Field(proto.MESSAGE, number=12, message=timestamp.Timestamp,) - end_time = proto.Field( - proto.MESSAGE, - number=13, - message=timestamp.Timestamp, - ) + end_time = proto.Field(proto.MESSAGE, number=13, message=timestamp.Timestamp,) - update_time = proto.Field( - proto.MESSAGE, - number=14, - message=timestamp.Timestamp, - ) + update_time = proto.Field(proto.MESSAGE, number=14, message=timestamp.Timestamp,) - error = proto.Field( - proto.MESSAGE, - number=15, - message=status.Status, - ) + error = proto.Field(proto.MESSAGE, number=15, message=status.Status,) labels = proto.MapField(proto.STRING, proto.STRING, number=16) diff --git a/google/cloud/aiplatform_v1beta1/types/job_service.py b/google/cloud/aiplatform_v1beta1/types/job_service.py index 9962e81c40..f64f07cbe3 100644 --- a/google/cloud/aiplatform_v1beta1/types/job_service.py +++ b/google/cloud/aiplatform_v1beta1/types/job_service.py @@ -77,11 +77,7 @@ class CreateCustomJobRequest(proto.Message): parent = proto.Field(proto.STRING, number=1) - custom_job = proto.Field( - proto.MESSAGE, - number=2, - message=gca_custom_job.CustomJob, - ) + custom_job = proto.Field(proto.MESSAGE, number=2, 
message=gca_custom_job.CustomJob,) class GetCustomJobRequest(proto.Message): @@ -144,11 +140,7 @@ class ListCustomJobsRequest(proto.Message): page_token = proto.Field(proto.STRING, number=4) - read_mask = proto.Field( - proto.MESSAGE, - number=5, - message=field_mask.FieldMask, - ) + read_mask = proto.Field(proto.MESSAGE, number=5, message=field_mask.FieldMask,) class ListCustomJobsResponse(proto.Message): @@ -169,9 +161,7 @@ def raw_page(self): return self custom_jobs = proto.RepeatedField( - proto.MESSAGE, - number=1, - message=gca_custom_job.CustomJob, + proto.MESSAGE, number=1, message=gca_custom_job.CustomJob, ) next_page_token = proto.Field(proto.STRING, number=2) @@ -219,9 +209,7 @@ class CreateDataLabelingJobRequest(proto.Message): parent = proto.Field(proto.STRING, number=1) data_labeling_job = proto.Field( - proto.MESSAGE, - number=2, - message=gca_data_labeling_job.DataLabelingJob, + proto.MESSAGE, number=2, message=gca_data_labeling_job.DataLabelingJob, ) @@ -287,11 +275,7 @@ class ListDataLabelingJobsRequest(proto.Message): page_token = proto.Field(proto.STRING, number=4) - read_mask = proto.Field( - proto.MESSAGE, - number=5, - message=field_mask.FieldMask, - ) + read_mask = proto.Field(proto.MESSAGE, number=5, message=field_mask.FieldMask,) order_by = proto.Field(proto.STRING, number=6) @@ -313,9 +297,7 @@ def raw_page(self): return self data_labeling_jobs = proto.RepeatedField( - proto.MESSAGE, - number=1, - message=gca_data_labeling_job.DataLabelingJob, + proto.MESSAGE, number=1, message=gca_data_labeling_job.DataLabelingJob, ) next_page_token = proto.Field(proto.STRING, number=2) @@ -435,11 +417,7 @@ class ListHyperparameterTuningJobsRequest(proto.Message): page_token = proto.Field(proto.STRING, number=4) - read_mask = proto.Field( - proto.MESSAGE, - number=5, - message=field_mask.FieldMask, - ) + read_mask = proto.Field(proto.MESSAGE, number=5, message=field_mask.FieldMask,) class ListHyperparameterTuningJobsResponse(proto.Message): @@ -516,9 +494,7 @@ class CreateBatchPredictionJobRequest(proto.Message): parent = proto.Field(proto.STRING, number=1) batch_prediction_job = proto.Field( - proto.MESSAGE, - number=2, - message=gca_batch_prediction_job.BatchPredictionJob, + proto.MESSAGE, number=2, message=gca_batch_prediction_job.BatchPredictionJob, ) @@ -584,11 +560,7 @@ class ListBatchPredictionJobsRequest(proto.Message): page_token = proto.Field(proto.STRING, number=4) - read_mask = proto.Field( - proto.MESSAGE, - number=5, - message=field_mask.FieldMask, - ) + read_mask = proto.Field(proto.MESSAGE, number=5, message=field_mask.FieldMask,) class ListBatchPredictionJobsResponse(proto.Message): @@ -610,9 +582,7 @@ def raw_page(self): return self batch_prediction_jobs = proto.RepeatedField( - proto.MESSAGE, - number=1, - message=gca_batch_prediction_job.BatchPredictionJob, + proto.MESSAGE, number=1, message=gca_batch_prediction_job.BatchPredictionJob, ) next_page_token = proto.Field(proto.STRING, number=2) diff --git a/google/cloud/aiplatform_v1beta1/types/job_state.py b/google/cloud/aiplatform_v1beta1/types/job_state.py index 2baf9e447d..f86e179b1b 100644 --- a/google/cloud/aiplatform_v1beta1/types/job_state.py +++ b/google/cloud/aiplatform_v1beta1/types/job_state.py @@ -19,10 +19,7 @@ __protobuf__ = proto.module( - package="google.cloud.aiplatform.v1beta1", - manifest={ - "JobState", - }, + package="google.cloud.aiplatform.v1beta1", manifest={"JobState",}, ) diff --git a/google/cloud/aiplatform_v1beta1/types/machine_resources.py 
b/google/cloud/aiplatform_v1beta1/types/machine_resources.py index eefaa7240e..c71aca024e 100644 --- a/google/cloud/aiplatform_v1beta1/types/machine_resources.py +++ b/google/cloud/aiplatform_v1beta1/types/machine_resources.py @@ -92,9 +92,7 @@ class MachineSpec(proto.Message): machine_type = proto.Field(proto.STRING, number=1) accelerator_type = proto.Field( - proto.ENUM, - number=2, - enum=gca_accelerator_type.AcceleratorType, + proto.ENUM, number=2, enum=gca_accelerator_type.AcceleratorType, ) accelerator_count = proto.Field(proto.INT32, number=3) @@ -133,11 +131,7 @@ class DedicatedResources(proto.Message): as the default value. """ - machine_spec = proto.Field( - proto.MESSAGE, - number=1, - message="MachineSpec", - ) + machine_spec = proto.Field(proto.MESSAGE, number=1, message="MachineSpec",) min_replica_count = proto.Field(proto.INT32, number=2) @@ -201,11 +195,7 @@ class BatchDedicatedResources(proto.Message): The default value is 10. """ - machine_spec = proto.Field( - proto.MESSAGE, - number=1, - message="MachineSpec", - ) + machine_spec = proto.Field(proto.MESSAGE, number=1, message="MachineSpec",) starting_replica_count = proto.Field(proto.INT32, number=2) diff --git a/google/cloud/aiplatform_v1beta1/types/manual_batch_tuning_parameters.py b/google/cloud/aiplatform_v1beta1/types/manual_batch_tuning_parameters.py index 849c24b16c..7a467d5069 100644 --- a/google/cloud/aiplatform_v1beta1/types/manual_batch_tuning_parameters.py +++ b/google/cloud/aiplatform_v1beta1/types/manual_batch_tuning_parameters.py @@ -20,9 +20,7 @@ __protobuf__ = proto.module( package="google.cloud.aiplatform.v1beta1", - manifest={ - "ManualBatchTuningParameters", - }, + manifest={"ManualBatchTuningParameters",}, ) diff --git a/google/cloud/aiplatform_v1beta1/types/migratable_resource.py b/google/cloud/aiplatform_v1beta1/types/migratable_resource.py index 689994f38e..99a6e65a42 100644 --- a/google/cloud/aiplatform_v1beta1/types/migratable_resource.py +++ b/google/cloud/aiplatform_v1beta1/types/migratable_resource.py @@ -22,10 +22,7 @@ __protobuf__ = proto.module( - package="google.cloud.aiplatform.v1beta1", - manifest={ - "MigratableResource", - }, + package="google.cloud.aiplatform.v1beta1", manifest={"MigratableResource",}, ) @@ -155,43 +152,27 @@ class DataLabelingAnnotatedDataset(proto.Message): ) ml_engine_model_version = proto.Field( - proto.MESSAGE, - number=1, - oneof="resource", - message=MlEngineModelVersion, + proto.MESSAGE, number=1, oneof="resource", message=MlEngineModelVersion, ) automl_model = proto.Field( - proto.MESSAGE, - number=2, - oneof="resource", - message=AutomlModel, + proto.MESSAGE, number=2, oneof="resource", message=AutomlModel, ) automl_dataset = proto.Field( - proto.MESSAGE, - number=3, - oneof="resource", - message=AutomlDataset, + proto.MESSAGE, number=3, oneof="resource", message=AutomlDataset, ) data_labeling_dataset = proto.Field( - proto.MESSAGE, - number=4, - oneof="resource", - message=DataLabelingDataset, + proto.MESSAGE, number=4, oneof="resource", message=DataLabelingDataset, ) last_migrate_time = proto.Field( - proto.MESSAGE, - number=5, - message=timestamp.Timestamp, + proto.MESSAGE, number=5, message=timestamp.Timestamp, ) last_update_time = proto.Field( - proto.MESSAGE, - number=6, - message=timestamp.Timestamp, + proto.MESSAGE, number=6, message=timestamp.Timestamp, ) diff --git a/google/cloud/aiplatform_v1beta1/types/migration_service.py b/google/cloud/aiplatform_v1beta1/types/migration_service.py index cd31a3283c..46b0cdc66b 100644 --- 
a/google/cloud/aiplatform_v1beta1/types/migration_service.py +++ b/google/cloud/aiplatform_v1beta1/types/migration_service.py @@ -82,9 +82,7 @@ def raw_page(self): return self migratable_resources = proto.RepeatedField( - proto.MESSAGE, - number=1, - message=gca_migratable_resource.MigratableResource, + proto.MESSAGE, number=1, message=gca_migratable_resource.MigratableResource, ) next_page_token = proto.Field(proto.STRING, number=2) @@ -108,9 +106,7 @@ class BatchMigrateResourcesRequest(proto.Message): parent = proto.Field(proto.STRING, number=1) migrate_resource_requests = proto.RepeatedField( - proto.MESSAGE, - number=2, - message="MigrateResourceRequest", + proto.MESSAGE, number=2, message="MigrateResourceRequest", ) @@ -255,17 +251,11 @@ class MigrateDataLabelingAnnotatedDatasetConfig(proto.Message): ) migrate_automl_model_config = proto.Field( - proto.MESSAGE, - number=2, - oneof="request", - message=MigrateAutomlModelConfig, + proto.MESSAGE, number=2, oneof="request", message=MigrateAutomlModelConfig, ) migrate_automl_dataset_config = proto.Field( - proto.MESSAGE, - number=3, - oneof="request", - message=MigrateAutomlDatasetConfig, + proto.MESSAGE, number=3, oneof="request", message=MigrateAutomlDatasetConfig, ) migrate_data_labeling_dataset_config = proto.Field( @@ -286,9 +276,7 @@ class BatchMigrateResourcesResponse(proto.Message): """ migrate_resource_responses = proto.RepeatedField( - proto.MESSAGE, - number=1, - message="MigrateResourceResponse", + proto.MESSAGE, number=1, message="MigrateResourceResponse", ) @@ -311,9 +299,7 @@ class MigrateResourceResponse(proto.Message): model = proto.Field(proto.STRING, number=2, oneof="migrated_resource") migratable_resource = proto.Field( - proto.MESSAGE, - number=3, - message=gca_migratable_resource.MigratableResource, + proto.MESSAGE, number=3, message=gca_migratable_resource.MigratableResource, ) @@ -327,9 +313,7 @@ class BatchMigrateResourcesOperationMetadata(proto.Message): """ generic_metadata = proto.Field( - proto.MESSAGE, - number=1, - message=operation.GenericOperationMetadata, + proto.MESSAGE, number=1, message=operation.GenericOperationMetadata, ) diff --git a/google/cloud/aiplatform_v1beta1/types/model.py b/google/cloud/aiplatform_v1beta1/types/model.py index 08528748dc..21e8c41034 100644 --- a/google/cloud/aiplatform_v1beta1/types/model.py +++ b/google/cloud/aiplatform_v1beta1/types/model.py @@ -27,12 +27,7 @@ __protobuf__ = proto.module( package="google.cloud.aiplatform.v1beta1", - manifest={ - "Model", - "PredictSchemata", - "ModelContainerSpec", - "Port", - }, + manifest={"Model", "PredictSchemata", "ModelContainerSpec", "Port",}, ) @@ -279,9 +274,7 @@ class ExportableContent(proto.Enum): id = proto.Field(proto.STRING, number=1) exportable_contents = proto.RepeatedField( - proto.ENUM, - number=2, - enum="Model.ExportFormat.ExportableContent", + proto.ENUM, number=2, enum="Model.ExportFormat.ExportableContent", ) name = proto.Field(proto.STRING, number=1) @@ -290,68 +283,40 @@ class ExportableContent(proto.Enum): description = proto.Field(proto.STRING, number=3) - predict_schemata = proto.Field( - proto.MESSAGE, - number=4, - message="PredictSchemata", - ) + predict_schemata = proto.Field(proto.MESSAGE, number=4, message="PredictSchemata",) metadata_schema_uri = proto.Field(proto.STRING, number=5) - metadata = proto.Field( - proto.MESSAGE, - number=6, - message=struct.Value, - ) + metadata = proto.Field(proto.MESSAGE, number=6, message=struct.Value,) supported_export_formats = proto.RepeatedField( - proto.MESSAGE, - 
number=20, - message=ExportFormat, + proto.MESSAGE, number=20, message=ExportFormat, ) training_pipeline = proto.Field(proto.STRING, number=7) - container_spec = proto.Field( - proto.MESSAGE, - number=9, - message="ModelContainerSpec", - ) + container_spec = proto.Field(proto.MESSAGE, number=9, message="ModelContainerSpec",) artifact_uri = proto.Field(proto.STRING, number=26) supported_deployment_resources_types = proto.RepeatedField( - proto.ENUM, - number=10, - enum=DeploymentResourcesType, + proto.ENUM, number=10, enum=DeploymentResourcesType, ) supported_input_storage_formats = proto.RepeatedField(proto.STRING, number=11) supported_output_storage_formats = proto.RepeatedField(proto.STRING, number=12) - create_time = proto.Field( - proto.MESSAGE, - number=13, - message=timestamp.Timestamp, - ) + create_time = proto.Field(proto.MESSAGE, number=13, message=timestamp.Timestamp,) - update_time = proto.Field( - proto.MESSAGE, - number=14, - message=timestamp.Timestamp, - ) + update_time = proto.Field(proto.MESSAGE, number=14, message=timestamp.Timestamp,) deployed_models = proto.RepeatedField( - proto.MESSAGE, - number=15, - message=deployed_model_ref.DeployedModelRef, + proto.MESSAGE, number=15, message=deployed_model_ref.DeployedModelRef, ) explanation_spec = proto.Field( - proto.MESSAGE, - number=23, - message=explanation.ExplanationSpec, + proto.MESSAGE, number=23, message=explanation.ExplanationSpec, ) etag = proto.Field(proto.STRING, number=16) @@ -658,17 +623,9 @@ class ModelContainerSpec(proto.Message): args = proto.RepeatedField(proto.STRING, number=3) - env = proto.RepeatedField( - proto.MESSAGE, - number=4, - message=env_var.EnvVar, - ) + env = proto.RepeatedField(proto.MESSAGE, number=4, message=env_var.EnvVar,) - ports = proto.RepeatedField( - proto.MESSAGE, - number=5, - message="Port", - ) + ports = proto.RepeatedField(proto.MESSAGE, number=5, message="Port",) predict_route = proto.Field(proto.STRING, number=6) diff --git a/google/cloud/aiplatform_v1beta1/types/model_evaluation.py b/google/cloud/aiplatform_v1beta1/types/model_evaluation.py index 7a55d1e7fc..b768ed978e 100644 --- a/google/cloud/aiplatform_v1beta1/types/model_evaluation.py +++ b/google/cloud/aiplatform_v1beta1/types/model_evaluation.py @@ -24,10 +24,7 @@ __protobuf__ = proto.module( - package="google.cloud.aiplatform.v1beta1", - manifest={ - "ModelEvaluation", - }, + package="google.cloud.aiplatform.v1beta1", manifest={"ModelEvaluation",}, ) @@ -74,24 +71,14 @@ class ModelEvaluation(proto.Message): metrics_schema_uri = proto.Field(proto.STRING, number=2) - metrics = proto.Field( - proto.MESSAGE, - number=3, - message=struct.Value, - ) + metrics = proto.Field(proto.MESSAGE, number=3, message=struct.Value,) - create_time = proto.Field( - proto.MESSAGE, - number=4, - message=timestamp.Timestamp, - ) + create_time = proto.Field(proto.MESSAGE, number=4, message=timestamp.Timestamp,) slice_dimensions = proto.RepeatedField(proto.STRING, number=5) model_explanation = proto.Field( - proto.MESSAGE, - number=8, - message=explanation.ModelExplanation, + proto.MESSAGE, number=8, message=explanation.ModelExplanation, ) diff --git a/google/cloud/aiplatform_v1beta1/types/model_evaluation_slice.py b/google/cloud/aiplatform_v1beta1/types/model_evaluation_slice.py index af37ef736c..1039d32c1f 100644 --- a/google/cloud/aiplatform_v1beta1/types/model_evaluation_slice.py +++ b/google/cloud/aiplatform_v1beta1/types/model_evaluation_slice.py @@ -23,10 +23,7 @@ __protobuf__ = proto.module( - package="google.cloud.aiplatform.v1beta1", - 
manifest={ - "ModelEvaluationSlice", - }, + package="google.cloud.aiplatform.v1beta1", manifest={"ModelEvaluationSlice",}, ) @@ -82,25 +79,13 @@ class Slice(proto.Message): name = proto.Field(proto.STRING, number=1) - slice_ = proto.Field( - proto.MESSAGE, - number=2, - message=Slice, - ) + slice_ = proto.Field(proto.MESSAGE, number=2, message=Slice,) metrics_schema_uri = proto.Field(proto.STRING, number=3) - metrics = proto.Field( - proto.MESSAGE, - number=4, - message=struct.Value, - ) - - create_time = proto.Field( - proto.MESSAGE, - number=5, - message=timestamp.Timestamp, - ) + metrics = proto.Field(proto.MESSAGE, number=4, message=struct.Value,) + + create_time = proto.Field(proto.MESSAGE, number=5, message=timestamp.Timestamp,) __all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/google/cloud/aiplatform_v1beta1/types/model_service.py b/google/cloud/aiplatform_v1beta1/types/model_service.py index 4b783e7fa8..3cfb17ad2c 100644 --- a/google/cloud/aiplatform_v1beta1/types/model_service.py +++ b/google/cloud/aiplatform_v1beta1/types/model_service.py @@ -65,11 +65,7 @@ class UploadModelRequest(proto.Message): parent = proto.Field(proto.STRING, number=1) - model = proto.Field( - proto.MESSAGE, - number=2, - message=gca_model.Model, - ) + model = proto.Field(proto.MESSAGE, number=2, message=gca_model.Model,) class UploadModelOperationMetadata(proto.Message): @@ -83,9 +79,7 @@ class UploadModelOperationMetadata(proto.Message): """ generic_metadata = proto.Field( - proto.MESSAGE, - number=1, - message=operation.GenericOperationMetadata, + proto.MESSAGE, number=1, message=operation.GenericOperationMetadata, ) @@ -147,11 +141,7 @@ class ListModelsRequest(proto.Message): page_token = proto.Field(proto.STRING, number=4) - read_mask = proto.Field( - proto.MESSAGE, - number=5, - message=field_mask.FieldMask, - ) + read_mask = proto.Field(proto.MESSAGE, number=5, message=field_mask.FieldMask,) class ListModelsResponse(proto.Message): @@ -171,11 +161,7 @@ class ListModelsResponse(proto.Message): def raw_page(self): return self - models = proto.RepeatedField( - proto.MESSAGE, - number=1, - message=gca_model.Model, - ) + models = proto.RepeatedField(proto.MESSAGE, number=1, message=gca_model.Model,) next_page_token = proto.Field(proto.STRING, number=2) @@ -195,17 +181,9 @@ class UpdateModelRequest(proto.Message): [FieldMask](https://developers.google.com/protocol-buffers/docs/reference/google.protobuf#fieldmask). 
""" - model = proto.Field( - proto.MESSAGE, - number=1, - message=gca_model.Model, - ) + model = proto.Field(proto.MESSAGE, number=1, message=gca_model.Model,) - update_mask = proto.Field( - proto.MESSAGE, - number=2, - message=field_mask.FieldMask, - ) + update_mask = proto.Field(proto.MESSAGE, number=2, message=field_mask.FieldMask,) class DeleteModelRequest(proto.Message): @@ -267,24 +245,16 @@ class OutputConfig(proto.Message): export_format_id = proto.Field(proto.STRING, number=1) artifact_destination = proto.Field( - proto.MESSAGE, - number=3, - message=io.GcsDestination, + proto.MESSAGE, number=3, message=io.GcsDestination, ) image_destination = proto.Field( - proto.MESSAGE, - number=4, - message=io.ContainerRegistryDestination, + proto.MESSAGE, number=4, message=io.ContainerRegistryDestination, ) name = proto.Field(proto.STRING, number=1) - output_config = proto.Field( - proto.MESSAGE, - number=2, - message=OutputConfig, - ) + output_config = proto.Field(proto.MESSAGE, number=2, message=OutputConfig,) class ExportModelOperationMetadata(proto.Message): @@ -322,16 +292,10 @@ class OutputInfo(proto.Message): image_output_uri = proto.Field(proto.STRING, number=3) generic_metadata = proto.Field( - proto.MESSAGE, - number=1, - message=operation.GenericOperationMetadata, + proto.MESSAGE, number=1, message=operation.GenericOperationMetadata, ) - output_info = proto.Field( - proto.MESSAGE, - number=2, - message=OutputInfo, - ) + output_info = proto.Field(proto.MESSAGE, number=2, message=OutputInfo,) class ExportModelResponse(proto.Message): @@ -386,11 +350,7 @@ class ListModelEvaluationsRequest(proto.Message): page_token = proto.Field(proto.STRING, number=4) - read_mask = proto.Field( - proto.MESSAGE, - number=5, - message=field_mask.FieldMask, - ) + read_mask = proto.Field(proto.MESSAGE, number=5, message=field_mask.FieldMask,) class ListModelEvaluationsResponse(proto.Message): @@ -412,9 +372,7 @@ def raw_page(self): return self model_evaluations = proto.RepeatedField( - proto.MESSAGE, - number=1, - message=model_evaluation.ModelEvaluation, + proto.MESSAGE, number=1, message=model_evaluation.ModelEvaluation, ) next_page_token = proto.Field(proto.STRING, number=2) @@ -469,11 +427,7 @@ class ListModelEvaluationSlicesRequest(proto.Message): page_token = proto.Field(proto.STRING, number=4) - read_mask = proto.Field( - proto.MESSAGE, - number=5, - message=field_mask.FieldMask, - ) + read_mask = proto.Field(proto.MESSAGE, number=5, message=field_mask.FieldMask,) class ListModelEvaluationSlicesResponse(proto.Message): @@ -495,9 +449,7 @@ def raw_page(self): return self model_evaluation_slices = proto.RepeatedField( - proto.MESSAGE, - number=1, - message=model_evaluation_slice.ModelEvaluationSlice, + proto.MESSAGE, number=1, message=model_evaluation_slice.ModelEvaluationSlice, ) next_page_token = proto.Field(proto.STRING, number=2) diff --git a/google/cloud/aiplatform_v1beta1/types/operation.py b/google/cloud/aiplatform_v1beta1/types/operation.py index c9d084cbfa..68fb0daead 100644 --- a/google/cloud/aiplatform_v1beta1/types/operation.py +++ b/google/cloud/aiplatform_v1beta1/types/operation.py @@ -24,10 +24,7 @@ __protobuf__ = proto.module( package="google.cloud.aiplatform.v1beta1", - manifest={ - "GenericOperationMetadata", - "DeleteOperationMetadata", - }, + manifest={"GenericOperationMetadata", "DeleteOperationMetadata",}, ) @@ -52,22 +49,12 @@ class GenericOperationMetadata(proto.Message): """ partial_failures = proto.RepeatedField( - proto.MESSAGE, - number=1, - message=status.Status, + 
proto.MESSAGE, number=1, message=status.Status, ) - create_time = proto.Field( - proto.MESSAGE, - number=2, - message=timestamp.Timestamp, - ) + create_time = proto.Field(proto.MESSAGE, number=2, message=timestamp.Timestamp,) - update_time = proto.Field( - proto.MESSAGE, - number=3, - message=timestamp.Timestamp, - ) + update_time = proto.Field(proto.MESSAGE, number=3, message=timestamp.Timestamp,) class DeleteOperationMetadata(proto.Message): @@ -79,9 +66,7 @@ class DeleteOperationMetadata(proto.Message): """ generic_metadata = proto.Field( - proto.MESSAGE, - number=1, - message="GenericOperationMetadata", + proto.MESSAGE, number=1, message="GenericOperationMetadata", ) diff --git a/google/cloud/aiplatform_v1beta1/types/pipeline_service.py b/google/cloud/aiplatform_v1beta1/types/pipeline_service.py index 208ed5006a..9f0856732d 100644 --- a/google/cloud/aiplatform_v1beta1/types/pipeline_service.py +++ b/google/cloud/aiplatform_v1beta1/types/pipeline_service.py @@ -53,9 +53,7 @@ class CreateTrainingPipelineRequest(proto.Message): parent = proto.Field(proto.STRING, number=1) training_pipeline = proto.Field( - proto.MESSAGE, - number=2, - message=gca_training_pipeline.TrainingPipeline, + proto.MESSAGE, number=2, message=gca_training_pipeline.TrainingPipeline, ) @@ -118,11 +116,7 @@ class ListTrainingPipelinesRequest(proto.Message): page_token = proto.Field(proto.STRING, number=4) - read_mask = proto.Field( - proto.MESSAGE, - number=5, - message=field_mask.FieldMask, - ) + read_mask = proto.Field(proto.MESSAGE, number=5, message=field_mask.FieldMask,) class ListTrainingPipelinesResponse(proto.Message): @@ -144,9 +138,7 @@ def raw_page(self): return self training_pipelines = proto.RepeatedField( - proto.MESSAGE, - number=1, - message=gca_training_pipeline.TrainingPipeline, + proto.MESSAGE, number=1, message=gca_training_pipeline.TrainingPipeline, ) next_page_token = proto.Field(proto.STRING, number=2) diff --git a/google/cloud/aiplatform_v1beta1/types/pipeline_state.py b/google/cloud/aiplatform_v1beta1/types/pipeline_state.py index 9c52592838..cede653bd6 100644 --- a/google/cloud/aiplatform_v1beta1/types/pipeline_state.py +++ b/google/cloud/aiplatform_v1beta1/types/pipeline_state.py @@ -19,10 +19,7 @@ __protobuf__ = proto.module( - package="google.cloud.aiplatform.v1beta1", - manifest={ - "PipelineState", - }, + package="google.cloud.aiplatform.v1beta1", manifest={"PipelineState",}, ) diff --git a/google/cloud/aiplatform_v1beta1/types/prediction_service.py b/google/cloud/aiplatform_v1beta1/types/prediction_service.py index 3e5f8d7be8..b000f88bf8 100644 --- a/google/cloud/aiplatform_v1beta1/types/prediction_service.py +++ b/google/cloud/aiplatform_v1beta1/types/prediction_service.py @@ -65,17 +65,9 @@ class PredictRequest(proto.Message): endpoint = proto.Field(proto.STRING, number=1) - instances = proto.RepeatedField( - proto.MESSAGE, - number=2, - message=struct.Value, - ) + instances = proto.RepeatedField(proto.MESSAGE, number=2, message=struct.Value,) - parameters = proto.Field( - proto.MESSAGE, - number=3, - message=struct.Value, - ) + parameters = proto.Field(proto.MESSAGE, number=3, message=struct.Value,) class PredictResponse(proto.Message): @@ -95,11 +87,7 @@ class PredictResponse(proto.Message): served this prediction. 
""" - predictions = proto.RepeatedField( - proto.MESSAGE, - number=1, - message=struct.Value, - ) + predictions = proto.RepeatedField(proto.MESSAGE, number=1, message=struct.Value,) deployed_model_id = proto.Field(proto.STRING, number=2) @@ -140,17 +128,9 @@ class ExplainRequest(proto.Message): endpoint = proto.Field(proto.STRING, number=1) - instances = proto.RepeatedField( - proto.MESSAGE, - number=2, - message=struct.Value, - ) + instances = proto.RepeatedField(proto.MESSAGE, number=2, message=struct.Value,) - parameters = proto.Field( - proto.MESSAGE, - number=4, - message=struct.Value, - ) + parameters = proto.Field(proto.MESSAGE, number=4, message=struct.Value,) deployed_model_id = proto.Field(proto.STRING, number=3) @@ -177,18 +157,12 @@ class ExplainResponse(proto.Message): """ explanations = proto.RepeatedField( - proto.MESSAGE, - number=1, - message=explanation.Explanation, + proto.MESSAGE, number=1, message=explanation.Explanation, ) deployed_model_id = proto.Field(proto.STRING, number=2) - predictions = proto.RepeatedField( - proto.MESSAGE, - number=3, - message=struct.Value, - ) + predictions = proto.RepeatedField(proto.MESSAGE, number=3, message=struct.Value,) __all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/google/cloud/aiplatform_v1beta1/types/specialist_pool.py b/google/cloud/aiplatform_v1beta1/types/specialist_pool.py index 9b23b5c3c1..4ac8c6a709 100644 --- a/google/cloud/aiplatform_v1beta1/types/specialist_pool.py +++ b/google/cloud/aiplatform_v1beta1/types/specialist_pool.py @@ -19,10 +19,7 @@ __protobuf__ = proto.module( - package="google.cloud.aiplatform.v1beta1", - manifest={ - "SpecialistPool", - }, + package="google.cloud.aiplatform.v1beta1", manifest={"SpecialistPool",}, ) diff --git a/google/cloud/aiplatform_v1beta1/types/specialist_pool_service.py b/google/cloud/aiplatform_v1beta1/types/specialist_pool_service.py index 811ac554ce..724f7165a6 100644 --- a/google/cloud/aiplatform_v1beta1/types/specialist_pool_service.py +++ b/google/cloud/aiplatform_v1beta1/types/specialist_pool_service.py @@ -54,9 +54,7 @@ class CreateSpecialistPoolRequest(proto.Message): parent = proto.Field(proto.STRING, number=1) specialist_pool = proto.Field( - proto.MESSAGE, - number=2, - message=gca_specialist_pool.SpecialistPool, + proto.MESSAGE, number=2, message=gca_specialist_pool.SpecialistPool, ) @@ -70,9 +68,7 @@ class CreateSpecialistPoolOperationMetadata(proto.Message): """ generic_metadata = proto.Field( - proto.MESSAGE, - number=1, - message=operation.GenericOperationMetadata, + proto.MESSAGE, number=1, message=operation.GenericOperationMetadata, ) @@ -118,11 +114,7 @@ class ListSpecialistPoolsRequest(proto.Message): page_token = proto.Field(proto.STRING, number=3) - read_mask = proto.Field( - proto.MESSAGE, - number=4, - message=field_mask.FieldMask, - ) + read_mask = proto.Field(proto.MESSAGE, number=4, message=field_mask.FieldMask,) class ListSpecialistPoolsResponse(proto.Message): @@ -142,9 +134,7 @@ def raw_page(self): return self specialist_pools = proto.RepeatedField( - proto.MESSAGE, - number=1, - message=gca_specialist_pool.SpecialistPool, + proto.MESSAGE, number=1, message=gca_specialist_pool.SpecialistPool, ) next_page_token = proto.Field(proto.STRING, number=2) @@ -185,16 +175,10 @@ class UpdateSpecialistPoolRequest(proto.Message): """ specialist_pool = proto.Field( - proto.MESSAGE, - number=1, - message=gca_specialist_pool.SpecialistPool, + proto.MESSAGE, number=1, message=gca_specialist_pool.SpecialistPool, ) - update_mask = proto.Field( - proto.MESSAGE, 
- number=2, - message=field_mask.FieldMask, - ) + update_mask = proto.Field(proto.MESSAGE, number=2, message=field_mask.FieldMask,) class UpdateSpecialistPoolOperationMetadata(proto.Message): @@ -214,9 +198,7 @@ class UpdateSpecialistPoolOperationMetadata(proto.Message): specialist_pool = proto.Field(proto.STRING, number=1) generic_metadata = proto.Field( - proto.MESSAGE, - number=2, - message=operation.GenericOperationMetadata, + proto.MESSAGE, number=2, message=operation.GenericOperationMetadata, ) diff --git a/google/cloud/aiplatform_v1beta1/types/study.py b/google/cloud/aiplatform_v1beta1/types/study.py index 06abf97ac1..2d6f4ae8c3 100644 --- a/google/cloud/aiplatform_v1beta1/types/study.py +++ b/google/cloud/aiplatform_v1beta1/types/study.py @@ -24,11 +24,7 @@ __protobuf__ = proto.module( package="google.cloud.aiplatform.v1beta1", - manifest={ - "Trial", - "StudySpec", - "Measurement", - }, + manifest={"Trial", "StudySpec", "Measurement",}, ) @@ -86,43 +82,19 @@ class Parameter(proto.Message): parameter_id = proto.Field(proto.STRING, number=1) - value = proto.Field( - proto.MESSAGE, - number=2, - message=struct.Value, - ) + value = proto.Field(proto.MESSAGE, number=2, message=struct.Value,) id = proto.Field(proto.STRING, number=2) - state = proto.Field( - proto.ENUM, - number=3, - enum=State, - ) - - parameters = proto.RepeatedField( - proto.MESSAGE, - number=4, - message=Parameter, - ) - - final_measurement = proto.Field( - proto.MESSAGE, - number=5, - message="Measurement", - ) - - start_time = proto.Field( - proto.MESSAGE, - number=7, - message=timestamp.Timestamp, - ) - - end_time = proto.Field( - proto.MESSAGE, - number=8, - message=timestamp.Timestamp, - ) + state = proto.Field(proto.ENUM, number=3, enum=State,) + + parameters = proto.RepeatedField(proto.MESSAGE, number=4, message=Parameter,) + + final_measurement = proto.Field(proto.MESSAGE, number=5, message="Measurement",) + + start_time = proto.Field(proto.MESSAGE, number=7, message=timestamp.Timestamp,) + + end_time = proto.Field(proto.MESSAGE, number=8, message=timestamp.Timestamp,) custom_job = proto.Field(proto.STRING, number=11) @@ -166,11 +138,7 @@ class GoalType(proto.Enum): metric_id = proto.Field(proto.STRING, number=1) - goal = proto.Field( - proto.ENUM, - number=2, - enum="StudySpec.MetricSpec.GoalType", - ) + goal = proto.Field(proto.ENUM, number=2, enum="StudySpec.MetricSpec.GoalType",) class ParameterSpec(proto.Message): r"""Represents a single parameter to optimize. 
@@ -345,9 +313,7 @@ class CategoricalValueCondition(proto.Message):
             )

         parameter_spec = proto.Field(
-            proto.MESSAGE,
-            number=1,
-            message="StudySpec.ParameterSpec",
+            proto.MESSAGE, number=1, message="StudySpec.ParameterSpec",
         )

         double_value_spec = proto.Field(
@@ -381,9 +347,7 @@ class CategoricalValueCondition(proto.Message):
         parameter_id = proto.Field(proto.STRING, number=1)

         scale_type = proto.Field(
-            proto.ENUM,
-            number=6,
-            enum="StudySpec.ParameterSpec.ScaleType",
+            proto.ENUM, number=6, enum="StudySpec.ParameterSpec.ScaleType",
         )

         conditional_parameter_specs = proto.RepeatedField(
@@ -392,23 +356,11 @@ class CategoricalValueCondition(proto.Message):
             message="StudySpec.ParameterSpec.ConditionalParameterSpec",
         )

-    metrics = proto.RepeatedField(
-        proto.MESSAGE,
-        number=1,
-        message=MetricSpec,
-    )
+    metrics = proto.RepeatedField(proto.MESSAGE, number=1, message=MetricSpec,)

-    parameters = proto.RepeatedField(
-        proto.MESSAGE,
-        number=2,
-        message=ParameterSpec,
-    )
+    parameters = proto.RepeatedField(proto.MESSAGE, number=2, message=ParameterSpec,)

-    algorithm = proto.Field(
-        proto.ENUM,
-        number=3,
-        enum=Algorithm,
-    )
+    algorithm = proto.Field(proto.ENUM, number=3, enum=Algorithm,)


 class Measurement(proto.Message):
@@ -445,11 +397,7 @@ class Metric(proto.Message):

         step_count = proto.Field(proto.INT64, number=2)

-    metrics = proto.RepeatedField(
-        proto.MESSAGE,
-        number=3,
-        message=Metric,
-    )
+    metrics = proto.RepeatedField(proto.MESSAGE, number=3, message=Metric,)


 __all__ = tuple(sorted(__protobuf__.manifest))

diff --git a/google/cloud/aiplatform_v1beta1/types/training_pipeline.py b/google/cloud/aiplatform_v1beta1/types/training_pipeline.py
index 64ef852c5e..f1f0debaf9 100644
--- a/google/cloud/aiplatform_v1beta1/types/training_pipeline.py
+++ b/google/cloud/aiplatform_v1beta1/types/training_pipeline.py
@@ -146,67 +146,27 @@ class TrainingPipeline(proto.Message):

     display_name = proto.Field(proto.STRING, number=2)

-    input_data_config = proto.Field(
-        proto.MESSAGE,
-        number=3,
-        message="InputDataConfig",
-    )
+    input_data_config = proto.Field(proto.MESSAGE, number=3, message="InputDataConfig",)

     training_task_definition = proto.Field(proto.STRING, number=4)

-    training_task_inputs = proto.Field(
-        proto.MESSAGE,
-        number=5,
-        message=struct.Value,
-    )
+    training_task_inputs = proto.Field(proto.MESSAGE, number=5, message=struct.Value,)

-    training_task_metadata = proto.Field(
-        proto.MESSAGE,
-        number=6,
-        message=struct.Value,
-    )
+    training_task_metadata = proto.Field(proto.MESSAGE, number=6, message=struct.Value,)

-    model_to_upload = proto.Field(
-        proto.MESSAGE,
-        number=7,
-        message=model.Model,
-    )
+    model_to_upload = proto.Field(proto.MESSAGE, number=7, message=model.Model,)

-    state = proto.Field(
-        proto.ENUM,
-        number=9,
-        enum=pipeline_state.PipelineState,
-    )
+    state = proto.Field(proto.ENUM, number=9, enum=pipeline_state.PipelineState,)

-    error = proto.Field(
-        proto.MESSAGE,
-        number=10,
-        message=status.Status,
-    )
+    error = proto.Field(proto.MESSAGE, number=10, message=status.Status,)

-    create_time = proto.Field(
-        proto.MESSAGE,
-        number=11,
-        message=timestamp.Timestamp,
-    )
+    create_time = proto.Field(proto.MESSAGE, number=11, message=timestamp.Timestamp,)

-    start_time = proto.Field(
-        proto.MESSAGE,
-        number=12,
-        message=timestamp.Timestamp,
-    )
+    start_time = proto.Field(proto.MESSAGE, number=12, message=timestamp.Timestamp,)

-    end_time = proto.Field(
-        proto.MESSAGE,
-        number=13,
-        message=timestamp.Timestamp,
-    )
+    end_time = proto.Field(proto.MESSAGE, number=13, message=timestamp.Timestamp,)

-    update_time = proto.Field(
-        proto.MESSAGE,
-        number=14,
-        message=timestamp.Timestamp,
-    )
+    update_time = proto.Field(proto.MESSAGE, number=14, message=timestamp.Timestamp,)

     labels = proto.MapField(proto.STRING, proto.STRING, number=15)

@@ -327,45 +287,27 @@ class InputDataConfig(proto.Message):
     """

     fraction_split = proto.Field(
-        proto.MESSAGE,
-        number=2,
-        oneof="split",
-        message="FractionSplit",
+        proto.MESSAGE, number=2, oneof="split", message="FractionSplit",
     )

     filter_split = proto.Field(
-        proto.MESSAGE,
-        number=3,
-        oneof="split",
-        message="FilterSplit",
+        proto.MESSAGE, number=3, oneof="split", message="FilterSplit",
     )

     predefined_split = proto.Field(
-        proto.MESSAGE,
-        number=4,
-        oneof="split",
-        message="PredefinedSplit",
+        proto.MESSAGE, number=4, oneof="split", message="PredefinedSplit",
     )

     timestamp_split = proto.Field(
-        proto.MESSAGE,
-        number=5,
-        oneof="split",
-        message="TimestampSplit",
+        proto.MESSAGE, number=5, oneof="split", message="TimestampSplit",
     )

     gcs_destination = proto.Field(
-        proto.MESSAGE,
-        number=8,
-        oneof="destination",
-        message=io.GcsDestination,
+        proto.MESSAGE, number=8, oneof="destination", message=io.GcsDestination,
     )

     bigquery_destination = proto.Field(
-        proto.MESSAGE,
-        number=10,
-        oneof="destination",
-        message=io.BigQueryDestination,
+        proto.MESSAGE, number=10, oneof="destination", message=io.BigQueryDestination,
     )

     dataset_id = proto.Field(proto.STRING, number=1)

diff --git a/google/cloud/aiplatform_v1beta1/types/user_action_reference.py b/google/cloud/aiplatform_v1beta1/types/user_action_reference.py
index 742ba69127..710e4a6d16 100644
--- a/google/cloud/aiplatform_v1beta1/types/user_action_reference.py
+++ b/google/cloud/aiplatform_v1beta1/types/user_action_reference.py
@@ -19,10 +19,7 @@

 __protobuf__ = proto.module(
-    package="google.cloud.aiplatform.v1beta1",
-    manifest={
-        "UserActionReference",
-    },
+    package="google.cloud.aiplatform.v1beta1", manifest={"UserActionReference",},
 )

diff --git a/noxfile.py b/noxfile.py
index 295cac5eb5..87765339b5 100644
--- a/noxfile.py
+++ b/noxfile.py
@@ -40,9 +40,7 @@ def lint(session):
     """
     session.install("flake8", BLACK_VERSION)
     session.run(
-        "black",
-        "--check",
-        *BLACK_PATHS,
+        "black", "--check", *BLACK_PATHS,
     )

     session.run("flake8", "google", "tests")

@@ -59,8 +57,7 @@ def blacken(session):
     """
     session.install(BLACK_VERSION)
     session.run(
-        "black",
-        *BLACK_PATHS,
+        "black", *BLACK_PATHS,
     )
@@ -76,9 +73,7 @@ def default(session):
     session.install("asyncmock", "pytest-asyncio")

     session.install(
-        "mock",
-        "pytest",
-        "pytest-cov",
+        "mock", "pytest", "pytest-cov",
     )

     session.install("-e", ".")

@@ -128,9 +123,7 @@ def system(session):
     # Install all test dependencies, then install this package into the
     # virtualenv's dist-packages.
     session.install(
-        "mock",
-        "pytest",
-        "google-cloud-testutils",
+        "mock", "pytest", "google-cloud-testutils",
     )

     session.install("-e", ".")

diff --git a/tests/unit/gapic/aiplatform_v1beta1/test_dataset_service.py b/tests/unit/gapic/aiplatform_v1beta1/test_dataset_service.py
index 411933eca6..51022d9fb7 100644
--- a/tests/unit/gapic/aiplatform_v1beta1/test_dataset_service.py
+++ b/tests/unit/gapic/aiplatform_v1beta1/test_dataset_service.py
@@ -391,9 +391,7 @@ def test_dataset_service_client_client_options_scopes(
     client_class, transport_class, transport_name
 ):
     # Check the case scopes are provided.
- options = client_options.ClientOptions( - scopes=["1", "2"], - ) + options = client_options.ClientOptions(scopes=["1", "2"],) with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None client = client_class(client_options=options) @@ -461,8 +459,7 @@ def test_create_dataset( transport: str = "grpc", request_type=dataset_service.CreateDatasetRequest ): client = DatasetServiceClient( - credentials=credentials.AnonymousCredentials(), - transport=transport, + credentials=credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -495,8 +492,7 @@ async def test_create_dataset_async( transport: str = "grpc_asyncio", request_type=dataset_service.CreateDatasetRequest ): client = DatasetServiceAsyncClient( - credentials=credentials.AnonymousCredentials(), - transport=transport, + credentials=credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -528,9 +524,7 @@ async def test_create_dataset_async_from_dict(): def test_create_dataset_field_headers(): - client = DatasetServiceClient( - credentials=credentials.AnonymousCredentials(), - ) + client = DatasetServiceClient(credentials=credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. @@ -550,17 +544,12 @@ def test_create_dataset_field_headers(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "parent=parent/value", - ) in kw["metadata"] + assert ("x-goog-request-params", "parent=parent/value",) in kw["metadata"] @pytest.mark.asyncio async def test_create_dataset_field_headers_async(): - client = DatasetServiceAsyncClient( - credentials=credentials.AnonymousCredentials(), - ) + client = DatasetServiceAsyncClient(credentials=credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. @@ -582,16 +571,11 @@ async def test_create_dataset_field_headers_async(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "parent=parent/value", - ) in kw["metadata"] + assert ("x-goog-request-params", "parent=parent/value",) in kw["metadata"] def test_create_dataset_flattened(): - client = DatasetServiceClient( - credentials=credentials.AnonymousCredentials(), - ) + client = DatasetServiceClient(credentials=credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.create_dataset), "__call__") as call: @@ -601,8 +585,7 @@ def test_create_dataset_flattened(): # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. client.create_dataset( - parent="parent_value", - dataset=gca_dataset.Dataset(name="name_value"), + parent="parent_value", dataset=gca_dataset.Dataset(name="name_value"), ) # Establish that the underlying call was made with the expected @@ -616,9 +599,7 @@ def test_create_dataset_flattened(): def test_create_dataset_flattened_error(): - client = DatasetServiceClient( - credentials=credentials.AnonymousCredentials(), - ) + client = DatasetServiceClient(credentials=credentials.AnonymousCredentials(),) # Attempting to call a method with both a request object and flattened # fields is an error. 
@@ -632,9 +613,7 @@ def test_create_dataset_flattened_error(): @pytest.mark.asyncio async def test_create_dataset_flattened_async(): - client = DatasetServiceAsyncClient( - credentials=credentials.AnonymousCredentials(), - ) + client = DatasetServiceAsyncClient(credentials=credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.create_dataset), "__call__") as call: @@ -647,8 +626,7 @@ async def test_create_dataset_flattened_async(): # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. response = await client.create_dataset( - parent="parent_value", - dataset=gca_dataset.Dataset(name="name_value"), + parent="parent_value", dataset=gca_dataset.Dataset(name="name_value"), ) # Establish that the underlying call was made with the expected @@ -663,9 +641,7 @@ async def test_create_dataset_flattened_async(): @pytest.mark.asyncio async def test_create_dataset_flattened_error_async(): - client = DatasetServiceAsyncClient( - credentials=credentials.AnonymousCredentials(), - ) + client = DatasetServiceAsyncClient(credentials=credentials.AnonymousCredentials(),) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -681,8 +657,7 @@ def test_get_dataset( transport: str = "grpc", request_type=dataset_service.GetDatasetRequest ): client = DatasetServiceClient( - credentials=credentials.AnonymousCredentials(), - transport=transport, + credentials=credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -729,8 +704,7 @@ async def test_get_dataset_async( transport: str = "grpc_asyncio", request_type=dataset_service.GetDatasetRequest ): client = DatasetServiceAsyncClient( - credentials=credentials.AnonymousCredentials(), - transport=transport, + credentials=credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -775,9 +749,7 @@ async def test_get_dataset_async_from_dict(): def test_get_dataset_field_headers(): - client = DatasetServiceClient( - credentials=credentials.AnonymousCredentials(), - ) + client = DatasetServiceClient(credentials=credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. @@ -797,17 +769,12 @@ def test_get_dataset_field_headers(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "name=name/value", - ) in kw["metadata"] + assert ("x-goog-request-params", "name=name/value",) in kw["metadata"] @pytest.mark.asyncio async def test_get_dataset_field_headers_async(): - client = DatasetServiceAsyncClient( - credentials=credentials.AnonymousCredentials(), - ) + client = DatasetServiceAsyncClient(credentials=credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. @@ -827,16 +794,11 @@ async def test_get_dataset_field_headers_async(): # Establish that the field header was sent. 
_, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "name=name/value", - ) in kw["metadata"] + assert ("x-goog-request-params", "name=name/value",) in kw["metadata"] def test_get_dataset_flattened(): - client = DatasetServiceClient( - credentials=credentials.AnonymousCredentials(), - ) + client = DatasetServiceClient(credentials=credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.get_dataset), "__call__") as call: @@ -845,9 +807,7 @@ def test_get_dataset_flattened(): # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - client.get_dataset( - name="name_value", - ) + client.get_dataset(name="name_value",) # Establish that the underlying call was made with the expected # request object values. @@ -858,24 +818,19 @@ def test_get_dataset_flattened(): def test_get_dataset_flattened_error(): - client = DatasetServiceClient( - credentials=credentials.AnonymousCredentials(), - ) + client = DatasetServiceClient(credentials=credentials.AnonymousCredentials(),) # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): client.get_dataset( - dataset_service.GetDatasetRequest(), - name="name_value", + dataset_service.GetDatasetRequest(), name="name_value", ) @pytest.mark.asyncio async def test_get_dataset_flattened_async(): - client = DatasetServiceAsyncClient( - credentials=credentials.AnonymousCredentials(), - ) + client = DatasetServiceAsyncClient(credentials=credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.get_dataset), "__call__") as call: @@ -885,9 +840,7 @@ async def test_get_dataset_flattened_async(): call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dataset.Dataset()) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - response = await client.get_dataset( - name="name_value", - ) + response = await client.get_dataset(name="name_value",) # Establish that the underlying call was made with the expected # request object values. @@ -899,16 +852,13 @@ async def test_get_dataset_flattened_async(): @pytest.mark.asyncio async def test_get_dataset_flattened_error_async(): - client = DatasetServiceAsyncClient( - credentials=credentials.AnonymousCredentials(), - ) + client = DatasetServiceAsyncClient(credentials=credentials.AnonymousCredentials(),) # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): await client.get_dataset( - dataset_service.GetDatasetRequest(), - name="name_value", + dataset_service.GetDatasetRequest(), name="name_value", ) @@ -916,8 +866,7 @@ def test_update_dataset( transport: str = "grpc", request_type=dataset_service.UpdateDatasetRequest ): client = DatasetServiceClient( - credentials=credentials.AnonymousCredentials(), - transport=transport, + credentials=credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -964,8 +913,7 @@ async def test_update_dataset_async( transport: str = "grpc_asyncio", request_type=dataset_service.UpdateDatasetRequest ): client = DatasetServiceAsyncClient( - credentials=credentials.AnonymousCredentials(), - transport=transport, + credentials=credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -1010,9 +958,7 @@ async def test_update_dataset_async_from_dict(): def test_update_dataset_field_headers(): - client = DatasetServiceClient( - credentials=credentials.AnonymousCredentials(), - ) + client = DatasetServiceClient(credentials=credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. @@ -1032,17 +978,14 @@ def test_update_dataset_field_headers(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "dataset.name=dataset.name/value", - ) in kw["metadata"] + assert ("x-goog-request-params", "dataset.name=dataset.name/value",) in kw[ + "metadata" + ] @pytest.mark.asyncio async def test_update_dataset_field_headers_async(): - client = DatasetServiceAsyncClient( - credentials=credentials.AnonymousCredentials(), - ) + client = DatasetServiceAsyncClient(credentials=credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. @@ -1062,16 +1005,13 @@ async def test_update_dataset_field_headers_async(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "dataset.name=dataset.name/value", - ) in kw["metadata"] + assert ("x-goog-request-params", "dataset.name=dataset.name/value",) in kw[ + "metadata" + ] def test_update_dataset_flattened(): - client = DatasetServiceClient( - credentials=credentials.AnonymousCredentials(), - ) + client = DatasetServiceClient(credentials=credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.update_dataset), "__call__") as call: @@ -1096,9 +1036,7 @@ def test_update_dataset_flattened(): def test_update_dataset_flattened_error(): - client = DatasetServiceClient( - credentials=credentials.AnonymousCredentials(), - ) + client = DatasetServiceClient(credentials=credentials.AnonymousCredentials(),) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -1112,9 +1050,7 @@ def test_update_dataset_flattened_error(): @pytest.mark.asyncio async def test_update_dataset_flattened_async(): - client = DatasetServiceAsyncClient( - credentials=credentials.AnonymousCredentials(), - ) + client = DatasetServiceAsyncClient(credentials=credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object(type(client.transport.update_dataset), "__call__") as call: @@ -1141,9 +1077,7 @@ async def test_update_dataset_flattened_async(): @pytest.mark.asyncio async def test_update_dataset_flattened_error_async(): - client = DatasetServiceAsyncClient( - credentials=credentials.AnonymousCredentials(), - ) + client = DatasetServiceAsyncClient(credentials=credentials.AnonymousCredentials(),) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -1159,8 +1093,7 @@ def test_list_datasets( transport: str = "grpc", request_type=dataset_service.ListDatasetsRequest ): client = DatasetServiceClient( - credentials=credentials.AnonymousCredentials(), - transport=transport, + credentials=credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -1198,8 +1131,7 @@ async def test_list_datasets_async( transport: str = "grpc_asyncio", request_type=dataset_service.ListDatasetsRequest ): client = DatasetServiceAsyncClient( - credentials=credentials.AnonymousCredentials(), - transport=transport, + credentials=credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -1235,9 +1167,7 @@ async def test_list_datasets_async_from_dict(): def test_list_datasets_field_headers(): - client = DatasetServiceClient( - credentials=credentials.AnonymousCredentials(), - ) + client = DatasetServiceClient(credentials=credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. @@ -1257,17 +1187,12 @@ def test_list_datasets_field_headers(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "parent=parent/value", - ) in kw["metadata"] + assert ("x-goog-request-params", "parent=parent/value",) in kw["metadata"] @pytest.mark.asyncio async def test_list_datasets_field_headers_async(): - client = DatasetServiceAsyncClient( - credentials=credentials.AnonymousCredentials(), - ) + client = DatasetServiceAsyncClient(credentials=credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. @@ -1289,16 +1214,11 @@ async def test_list_datasets_field_headers_async(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "parent=parent/value", - ) in kw["metadata"] + assert ("x-goog-request-params", "parent=parent/value",) in kw["metadata"] def test_list_datasets_flattened(): - client = DatasetServiceClient( - credentials=credentials.AnonymousCredentials(), - ) + client = DatasetServiceClient(credentials=credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.list_datasets), "__call__") as call: @@ -1307,9 +1227,7 @@ def test_list_datasets_flattened(): # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - client.list_datasets( - parent="parent_value", - ) + client.list_datasets(parent="parent_value",) # Establish that the underlying call was made with the expected # request object values. 
@@ -1320,24 +1238,19 @@ def test_list_datasets_flattened(): def test_list_datasets_flattened_error(): - client = DatasetServiceClient( - credentials=credentials.AnonymousCredentials(), - ) + client = DatasetServiceClient(credentials=credentials.AnonymousCredentials(),) # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): client.list_datasets( - dataset_service.ListDatasetsRequest(), - parent="parent_value", + dataset_service.ListDatasetsRequest(), parent="parent_value", ) @pytest.mark.asyncio async def test_list_datasets_flattened_async(): - client = DatasetServiceAsyncClient( - credentials=credentials.AnonymousCredentials(), - ) + client = DatasetServiceAsyncClient(credentials=credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.list_datasets), "__call__") as call: @@ -1349,9 +1262,7 @@ async def test_list_datasets_flattened_async(): ) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - response = await client.list_datasets( - parent="parent_value", - ) + response = await client.list_datasets(parent="parent_value",) # Establish that the underlying call was made with the expected # request object values. @@ -1363,51 +1274,33 @@ async def test_list_datasets_flattened_async(): @pytest.mark.asyncio async def test_list_datasets_flattened_error_async(): - client = DatasetServiceAsyncClient( - credentials=credentials.AnonymousCredentials(), - ) + client = DatasetServiceAsyncClient(credentials=credentials.AnonymousCredentials(),) # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): await client.list_datasets( - dataset_service.ListDatasetsRequest(), - parent="parent_value", + dataset_service.ListDatasetsRequest(), parent="parent_value", ) def test_list_datasets_pager(): - client = DatasetServiceClient( - credentials=credentials.AnonymousCredentials, - ) + client = DatasetServiceClient(credentials=credentials.AnonymousCredentials,) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.list_datasets), "__call__") as call: # Set the response to a series of pages. call.side_effect = ( dataset_service.ListDatasetsResponse( - datasets=[ - dataset.Dataset(), - dataset.Dataset(), - dataset.Dataset(), - ], + datasets=[dataset.Dataset(), dataset.Dataset(), dataset.Dataset(),], next_page_token="abc", ), + dataset_service.ListDatasetsResponse(datasets=[], next_page_token="def",), dataset_service.ListDatasetsResponse( - datasets=[], - next_page_token="def", - ), - dataset_service.ListDatasetsResponse( - datasets=[ - dataset.Dataset(), - ], - next_page_token="ghi", + datasets=[dataset.Dataset(),], next_page_token="ghi", ), dataset_service.ListDatasetsResponse( - datasets=[ - dataset.Dataset(), - dataset.Dataset(), - ], + datasets=[dataset.Dataset(), dataset.Dataset(),], ), RuntimeError, ) @@ -1426,37 +1319,22 @@ def test_list_datasets_pager(): def test_list_datasets_pages(): - client = DatasetServiceClient( - credentials=credentials.AnonymousCredentials, - ) + client = DatasetServiceClient(credentials=credentials.AnonymousCredentials,) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.list_datasets), "__call__") as call: # Set the response to a series of pages. 
call.side_effect = ( dataset_service.ListDatasetsResponse( - datasets=[ - dataset.Dataset(), - dataset.Dataset(), - dataset.Dataset(), - ], + datasets=[dataset.Dataset(), dataset.Dataset(), dataset.Dataset(),], next_page_token="abc", ), + dataset_service.ListDatasetsResponse(datasets=[], next_page_token="def",), dataset_service.ListDatasetsResponse( - datasets=[], - next_page_token="def", + datasets=[dataset.Dataset(),], next_page_token="ghi", ), dataset_service.ListDatasetsResponse( - datasets=[ - dataset.Dataset(), - ], - next_page_token="ghi", - ), - dataset_service.ListDatasetsResponse( - datasets=[ - dataset.Dataset(), - dataset.Dataset(), - ], + datasets=[dataset.Dataset(), dataset.Dataset(),], ), RuntimeError, ) @@ -1467,9 +1345,7 @@ def test_list_datasets_pages(): @pytest.mark.asyncio async def test_list_datasets_async_pager(): - client = DatasetServiceAsyncClient( - credentials=credentials.AnonymousCredentials, - ) + client = DatasetServiceAsyncClient(credentials=credentials.AnonymousCredentials,) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -1478,34 +1354,19 @@ async def test_list_datasets_async_pager(): # Set the response to a series of pages. call.side_effect = ( dataset_service.ListDatasetsResponse( - datasets=[ - dataset.Dataset(), - dataset.Dataset(), - dataset.Dataset(), - ], + datasets=[dataset.Dataset(), dataset.Dataset(), dataset.Dataset(),], next_page_token="abc", ), + dataset_service.ListDatasetsResponse(datasets=[], next_page_token="def",), dataset_service.ListDatasetsResponse( - datasets=[], - next_page_token="def", - ), - dataset_service.ListDatasetsResponse( - datasets=[ - dataset.Dataset(), - ], - next_page_token="ghi", + datasets=[dataset.Dataset(),], next_page_token="ghi", ), dataset_service.ListDatasetsResponse( - datasets=[ - dataset.Dataset(), - dataset.Dataset(), - ], + datasets=[dataset.Dataset(), dataset.Dataset(),], ), RuntimeError, ) - async_pager = await client.list_datasets( - request={}, - ) + async_pager = await client.list_datasets(request={},) assert async_pager.next_page_token == "abc" responses = [] async for response in async_pager: @@ -1517,9 +1378,7 @@ async def test_list_datasets_async_pager(): @pytest.mark.asyncio async def test_list_datasets_async_pages(): - client = DatasetServiceAsyncClient( - credentials=credentials.AnonymousCredentials, - ) + client = DatasetServiceAsyncClient(credentials=credentials.AnonymousCredentials,) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -1528,28 +1387,15 @@ async def test_list_datasets_async_pages(): # Set the response to a series of pages. 
call.side_effect = ( dataset_service.ListDatasetsResponse( - datasets=[ - dataset.Dataset(), - dataset.Dataset(), - dataset.Dataset(), - ], + datasets=[dataset.Dataset(), dataset.Dataset(), dataset.Dataset(),], next_page_token="abc", ), + dataset_service.ListDatasetsResponse(datasets=[], next_page_token="def",), dataset_service.ListDatasetsResponse( - datasets=[], - next_page_token="def", + datasets=[dataset.Dataset(),], next_page_token="ghi", ), dataset_service.ListDatasetsResponse( - datasets=[ - dataset.Dataset(), - ], - next_page_token="ghi", - ), - dataset_service.ListDatasetsResponse( - datasets=[ - dataset.Dataset(), - dataset.Dataset(), - ], + datasets=[dataset.Dataset(), dataset.Dataset(),], ), RuntimeError, ) @@ -1564,8 +1410,7 @@ def test_delete_dataset( transport: str = "grpc", request_type=dataset_service.DeleteDatasetRequest ): client = DatasetServiceClient( - credentials=credentials.AnonymousCredentials(), - transport=transport, + credentials=credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -1598,8 +1443,7 @@ async def test_delete_dataset_async( transport: str = "grpc_asyncio", request_type=dataset_service.DeleteDatasetRequest ): client = DatasetServiceAsyncClient( - credentials=credentials.AnonymousCredentials(), - transport=transport, + credentials=credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -1631,9 +1475,7 @@ async def test_delete_dataset_async_from_dict(): def test_delete_dataset_field_headers(): - client = DatasetServiceClient( - credentials=credentials.AnonymousCredentials(), - ) + client = DatasetServiceClient(credentials=credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. @@ -1653,17 +1495,12 @@ def test_delete_dataset_field_headers(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "name=name/value", - ) in kw["metadata"] + assert ("x-goog-request-params", "name=name/value",) in kw["metadata"] @pytest.mark.asyncio async def test_delete_dataset_field_headers_async(): - client = DatasetServiceAsyncClient( - credentials=credentials.AnonymousCredentials(), - ) + client = DatasetServiceAsyncClient(credentials=credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. @@ -1685,16 +1522,11 @@ async def test_delete_dataset_field_headers_async(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "name=name/value", - ) in kw["metadata"] + assert ("x-goog-request-params", "name=name/value",) in kw["metadata"] def test_delete_dataset_flattened(): - client = DatasetServiceClient( - credentials=credentials.AnonymousCredentials(), - ) + client = DatasetServiceClient(credentials=credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.delete_dataset), "__call__") as call: @@ -1703,9 +1535,7 @@ def test_delete_dataset_flattened(): # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. 
- client.delete_dataset( - name="name_value", - ) + client.delete_dataset(name="name_value",) # Establish that the underlying call was made with the expected # request object values. @@ -1716,24 +1546,19 @@ def test_delete_dataset_flattened(): def test_delete_dataset_flattened_error(): - client = DatasetServiceClient( - credentials=credentials.AnonymousCredentials(), - ) + client = DatasetServiceClient(credentials=credentials.AnonymousCredentials(),) # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): client.delete_dataset( - dataset_service.DeleteDatasetRequest(), - name="name_value", + dataset_service.DeleteDatasetRequest(), name="name_value", ) @pytest.mark.asyncio async def test_delete_dataset_flattened_async(): - client = DatasetServiceAsyncClient( - credentials=credentials.AnonymousCredentials(), - ) + client = DatasetServiceAsyncClient(credentials=credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.delete_dataset), "__call__") as call: @@ -1745,9 +1570,7 @@ async def test_delete_dataset_flattened_async(): ) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - response = await client.delete_dataset( - name="name_value", - ) + response = await client.delete_dataset(name="name_value",) # Establish that the underlying call was made with the expected # request object values. @@ -1759,16 +1582,13 @@ async def test_delete_dataset_flattened_async(): @pytest.mark.asyncio async def test_delete_dataset_flattened_error_async(): - client = DatasetServiceAsyncClient( - credentials=credentials.AnonymousCredentials(), - ) + client = DatasetServiceAsyncClient(credentials=credentials.AnonymousCredentials(),) # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): await client.delete_dataset( - dataset_service.DeleteDatasetRequest(), - name="name_value", + dataset_service.DeleteDatasetRequest(), name="name_value", ) @@ -1776,8 +1596,7 @@ def test_import_data( transport: str = "grpc", request_type=dataset_service.ImportDataRequest ): client = DatasetServiceClient( - credentials=credentials.AnonymousCredentials(), - transport=transport, + credentials=credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -1810,8 +1629,7 @@ async def test_import_data_async( transport: str = "grpc_asyncio", request_type=dataset_service.ImportDataRequest ): client = DatasetServiceAsyncClient( - credentials=credentials.AnonymousCredentials(), - transport=transport, + credentials=credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -1843,9 +1661,7 @@ async def test_import_data_async_from_dict(): def test_import_data_field_headers(): - client = DatasetServiceClient( - credentials=credentials.AnonymousCredentials(), - ) + client = DatasetServiceClient(credentials=credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. @@ -1865,17 +1681,12 @@ def test_import_data_field_headers(): # Establish that the field header was sent. 
_, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "name=name/value", - ) in kw["metadata"] + assert ("x-goog-request-params", "name=name/value",) in kw["metadata"] @pytest.mark.asyncio async def test_import_data_field_headers_async(): - client = DatasetServiceAsyncClient( - credentials=credentials.AnonymousCredentials(), - ) + client = DatasetServiceAsyncClient(credentials=credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. @@ -1897,16 +1708,11 @@ async def test_import_data_field_headers_async(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "name=name/value", - ) in kw["metadata"] + assert ("x-goog-request-params", "name=name/value",) in kw["metadata"] def test_import_data_flattened(): - client = DatasetServiceClient( - credentials=credentials.AnonymousCredentials(), - ) + client = DatasetServiceClient(credentials=credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.import_data), "__call__") as call: @@ -1935,9 +1741,7 @@ def test_import_data_flattened(): def test_import_data_flattened_error(): - client = DatasetServiceClient( - credentials=credentials.AnonymousCredentials(), - ) + client = DatasetServiceClient(credentials=credentials.AnonymousCredentials(),) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -1953,9 +1757,7 @@ def test_import_data_flattened_error(): @pytest.mark.asyncio async def test_import_data_flattened_async(): - client = DatasetServiceAsyncClient( - credentials=credentials.AnonymousCredentials(), - ) + client = DatasetServiceAsyncClient(credentials=credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.import_data), "__call__") as call: @@ -1988,9 +1790,7 @@ async def test_import_data_flattened_async(): @pytest.mark.asyncio async def test_import_data_flattened_error_async(): - client = DatasetServiceAsyncClient( - credentials=credentials.AnonymousCredentials(), - ) + client = DatasetServiceAsyncClient(credentials=credentials.AnonymousCredentials(),) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -2008,8 +1808,7 @@ def test_export_data( transport: str = "grpc", request_type=dataset_service.ExportDataRequest ): client = DatasetServiceClient( - credentials=credentials.AnonymousCredentials(), - transport=transport, + credentials=credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -2042,8 +1841,7 @@ async def test_export_data_async( transport: str = "grpc_asyncio", request_type=dataset_service.ExportDataRequest ): client = DatasetServiceAsyncClient( - credentials=credentials.AnonymousCredentials(), - transport=transport, + credentials=credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -2075,9 +1873,7 @@ async def test_export_data_async_from_dict(): def test_export_data_field_headers(): - client = DatasetServiceClient( - credentials=credentials.AnonymousCredentials(), - ) + client = DatasetServiceClient(credentials=credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. 
Set these to a non-empty value. @@ -2097,17 +1893,12 @@ def test_export_data_field_headers(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "name=name/value", - ) in kw["metadata"] + assert ("x-goog-request-params", "name=name/value",) in kw["metadata"] @pytest.mark.asyncio async def test_export_data_field_headers_async(): - client = DatasetServiceAsyncClient( - credentials=credentials.AnonymousCredentials(), - ) + client = DatasetServiceAsyncClient(credentials=credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. @@ -2129,16 +1920,11 @@ async def test_export_data_field_headers_async(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "name=name/value", - ) in kw["metadata"] + assert ("x-goog-request-params", "name=name/value",) in kw["metadata"] def test_export_data_flattened(): - client = DatasetServiceClient( - credentials=credentials.AnonymousCredentials(), - ) + client = DatasetServiceClient(credentials=credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.export_data), "__call__") as call: @@ -2171,9 +1957,7 @@ def test_export_data_flattened(): def test_export_data_flattened_error(): - client = DatasetServiceClient( - credentials=credentials.AnonymousCredentials(), - ) + client = DatasetServiceClient(credentials=credentials.AnonymousCredentials(),) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -2191,9 +1975,7 @@ def test_export_data_flattened_error(): @pytest.mark.asyncio async def test_export_data_flattened_async(): - client = DatasetServiceAsyncClient( - credentials=credentials.AnonymousCredentials(), - ) + client = DatasetServiceAsyncClient(credentials=credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.export_data), "__call__") as call: @@ -2230,9 +2012,7 @@ async def test_export_data_flattened_async(): @pytest.mark.asyncio async def test_export_data_flattened_error_async(): - client = DatasetServiceAsyncClient( - credentials=credentials.AnonymousCredentials(), - ) + client = DatasetServiceAsyncClient(credentials=credentials.AnonymousCredentials(),) # Attempting to call a method with both a request object and flattened # fields is an error. 
@@ -2252,8 +2032,7 @@ def test_list_data_items( transport: str = "grpc", request_type=dataset_service.ListDataItemsRequest ): client = DatasetServiceClient( - credentials=credentials.AnonymousCredentials(), - transport=transport, + credentials=credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -2291,8 +2070,7 @@ async def test_list_data_items_async( transport: str = "grpc_asyncio", request_type=dataset_service.ListDataItemsRequest ): client = DatasetServiceAsyncClient( - credentials=credentials.AnonymousCredentials(), - transport=transport, + credentials=credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -2328,9 +2106,7 @@ async def test_list_data_items_async_from_dict(): def test_list_data_items_field_headers(): - client = DatasetServiceClient( - credentials=credentials.AnonymousCredentials(), - ) + client = DatasetServiceClient(credentials=credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. @@ -2350,17 +2126,12 @@ def test_list_data_items_field_headers(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "parent=parent/value", - ) in kw["metadata"] + assert ("x-goog-request-params", "parent=parent/value",) in kw["metadata"] @pytest.mark.asyncio async def test_list_data_items_field_headers_async(): - client = DatasetServiceAsyncClient( - credentials=credentials.AnonymousCredentials(), - ) + client = DatasetServiceAsyncClient(credentials=credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. @@ -2382,16 +2153,11 @@ async def test_list_data_items_field_headers_async(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "parent=parent/value", - ) in kw["metadata"] + assert ("x-goog-request-params", "parent=parent/value",) in kw["metadata"] def test_list_data_items_flattened(): - client = DatasetServiceClient( - credentials=credentials.AnonymousCredentials(), - ) + client = DatasetServiceClient(credentials=credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.list_data_items), "__call__") as call: @@ -2400,9 +2166,7 @@ def test_list_data_items_flattened(): # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - client.list_data_items( - parent="parent_value", - ) + client.list_data_items(parent="parent_value",) # Establish that the underlying call was made with the expected # request object values. @@ -2413,24 +2177,19 @@ def test_list_data_items_flattened(): def test_list_data_items_flattened_error(): - client = DatasetServiceClient( - credentials=credentials.AnonymousCredentials(), - ) + client = DatasetServiceClient(credentials=credentials.AnonymousCredentials(),) # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): client.list_data_items( - dataset_service.ListDataItemsRequest(), - parent="parent_value", + dataset_service.ListDataItemsRequest(), parent="parent_value", ) @pytest.mark.asyncio async def test_list_data_items_flattened_async(): - client = DatasetServiceAsyncClient( - credentials=credentials.AnonymousCredentials(), - ) + client = DatasetServiceAsyncClient(credentials=credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.list_data_items), "__call__") as call: @@ -2442,9 +2201,7 @@ async def test_list_data_items_flattened_async(): ) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - response = await client.list_data_items( - parent="parent_value", - ) + response = await client.list_data_items(parent="parent_value",) # Establish that the underlying call was made with the expected # request object values. @@ -2456,23 +2213,18 @@ async def test_list_data_items_flattened_async(): @pytest.mark.asyncio async def test_list_data_items_flattened_error_async(): - client = DatasetServiceAsyncClient( - credentials=credentials.AnonymousCredentials(), - ) + client = DatasetServiceAsyncClient(credentials=credentials.AnonymousCredentials(),) # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): await client.list_data_items( - dataset_service.ListDataItemsRequest(), - parent="parent_value", + dataset_service.ListDataItemsRequest(), parent="parent_value", ) def test_list_data_items_pager(): - client = DatasetServiceClient( - credentials=credentials.AnonymousCredentials, - ) + client = DatasetServiceClient(credentials=credentials.AnonymousCredentials,) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.list_data_items), "__call__") as call: @@ -2487,20 +2239,13 @@ def test_list_data_items_pager(): next_page_token="abc", ), dataset_service.ListDataItemsResponse( - data_items=[], - next_page_token="def", + data_items=[], next_page_token="def", ), dataset_service.ListDataItemsResponse( - data_items=[ - data_item.DataItem(), - ], - next_page_token="ghi", + data_items=[data_item.DataItem(),], next_page_token="ghi", ), dataset_service.ListDataItemsResponse( - data_items=[ - data_item.DataItem(), - data_item.DataItem(), - ], + data_items=[data_item.DataItem(), data_item.DataItem(),], ), RuntimeError, ) @@ -2519,9 +2264,7 @@ def test_list_data_items_pager(): def test_list_data_items_pages(): - client = DatasetServiceClient( - credentials=credentials.AnonymousCredentials, - ) + client = DatasetServiceClient(credentials=credentials.AnonymousCredentials,) # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object(type(client.transport.list_data_items), "__call__") as call: @@ -2536,20 +2279,13 @@ def test_list_data_items_pages(): next_page_token="abc", ), dataset_service.ListDataItemsResponse( - data_items=[], - next_page_token="def", + data_items=[], next_page_token="def", ), dataset_service.ListDataItemsResponse( - data_items=[ - data_item.DataItem(), - ], - next_page_token="ghi", + data_items=[data_item.DataItem(),], next_page_token="ghi", ), dataset_service.ListDataItemsResponse( - data_items=[ - data_item.DataItem(), - data_item.DataItem(), - ], + data_items=[data_item.DataItem(), data_item.DataItem(),], ), RuntimeError, ) @@ -2560,9 +2296,7 @@ def test_list_data_items_pages(): @pytest.mark.asyncio async def test_list_data_items_async_pager(): - client = DatasetServiceAsyncClient( - credentials=credentials.AnonymousCredentials, - ) + client = DatasetServiceAsyncClient(credentials=credentials.AnonymousCredentials,) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -2579,26 +2313,17 @@ async def test_list_data_items_async_pager(): next_page_token="abc", ), dataset_service.ListDataItemsResponse( - data_items=[], - next_page_token="def", + data_items=[], next_page_token="def", ), dataset_service.ListDataItemsResponse( - data_items=[ - data_item.DataItem(), - ], - next_page_token="ghi", + data_items=[data_item.DataItem(),], next_page_token="ghi", ), dataset_service.ListDataItemsResponse( - data_items=[ - data_item.DataItem(), - data_item.DataItem(), - ], + data_items=[data_item.DataItem(), data_item.DataItem(),], ), RuntimeError, ) - async_pager = await client.list_data_items( - request={}, - ) + async_pager = await client.list_data_items(request={},) assert async_pager.next_page_token == "abc" responses = [] async for response in async_pager: @@ -2610,9 +2335,7 @@ async def test_list_data_items_async_pager(): @pytest.mark.asyncio async def test_list_data_items_async_pages(): - client = DatasetServiceAsyncClient( - credentials=credentials.AnonymousCredentials, - ) + client = DatasetServiceAsyncClient(credentials=credentials.AnonymousCredentials,) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -2629,20 +2352,13 @@ async def test_list_data_items_async_pages(): next_page_token="abc", ), dataset_service.ListDataItemsResponse( - data_items=[], - next_page_token="def", + data_items=[], next_page_token="def", ), dataset_service.ListDataItemsResponse( - data_items=[ - data_item.DataItem(), - ], - next_page_token="ghi", + data_items=[data_item.DataItem(),], next_page_token="ghi", ), dataset_service.ListDataItemsResponse( - data_items=[ - data_item.DataItem(), - data_item.DataItem(), - ], + data_items=[data_item.DataItem(), data_item.DataItem(),], ), RuntimeError, ) @@ -2657,8 +2373,7 @@ def test_get_annotation_spec( transport: str = "grpc", request_type=dataset_service.GetAnnotationSpecRequest ): client = DatasetServiceClient( - credentials=credentials.AnonymousCredentials(), - transport=transport, + credentials=credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -2671,9 +2386,7 @@ def test_get_annotation_spec( ) as call: # Designate an appropriate return value for the call. 
call.return_value = annotation_spec.AnnotationSpec( - name="name_value", - display_name="display_name_value", - etag="etag_value", + name="name_value", display_name="display_name_value", etag="etag_value", ) response = client.get_annotation_spec(request) @@ -2705,8 +2418,7 @@ async def test_get_annotation_spec_async( request_type=dataset_service.GetAnnotationSpecRequest, ): client = DatasetServiceAsyncClient( - credentials=credentials.AnonymousCredentials(), - transport=transport, + credentials=credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -2720,9 +2432,7 @@ async def test_get_annotation_spec_async( # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( annotation_spec.AnnotationSpec( - name="name_value", - display_name="display_name_value", - etag="etag_value", + name="name_value", display_name="display_name_value", etag="etag_value", ) ) @@ -2750,9 +2460,7 @@ async def test_get_annotation_spec_async_from_dict(): def test_get_annotation_spec_field_headers(): - client = DatasetServiceClient( - credentials=credentials.AnonymousCredentials(), - ) + client = DatasetServiceClient(credentials=credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. @@ -2774,17 +2482,12 @@ def test_get_annotation_spec_field_headers(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "name=name/value", - ) in kw["metadata"] + assert ("x-goog-request-params", "name=name/value",) in kw["metadata"] @pytest.mark.asyncio async def test_get_annotation_spec_field_headers_async(): - client = DatasetServiceAsyncClient( - credentials=credentials.AnonymousCredentials(), - ) + client = DatasetServiceAsyncClient(credentials=credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. @@ -2808,16 +2511,11 @@ async def test_get_annotation_spec_field_headers_async(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "name=name/value", - ) in kw["metadata"] + assert ("x-goog-request-params", "name=name/value",) in kw["metadata"] def test_get_annotation_spec_flattened(): - client = DatasetServiceClient( - credentials=credentials.AnonymousCredentials(), - ) + client = DatasetServiceClient(credentials=credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -2828,9 +2526,7 @@ def test_get_annotation_spec_flattened(): # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - client.get_annotation_spec( - name="name_value", - ) + client.get_annotation_spec(name="name_value",) # Establish that the underlying call was made with the expected # request object values. @@ -2841,24 +2537,19 @@ def test_get_annotation_spec_flattened(): def test_get_annotation_spec_flattened_error(): - client = DatasetServiceClient( - credentials=credentials.AnonymousCredentials(), - ) + client = DatasetServiceClient(credentials=credentials.AnonymousCredentials(),) # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): client.get_annotation_spec( - dataset_service.GetAnnotationSpecRequest(), - name="name_value", + dataset_service.GetAnnotationSpecRequest(), name="name_value", ) @pytest.mark.asyncio async def test_get_annotation_spec_flattened_async(): - client = DatasetServiceAsyncClient( - credentials=credentials.AnonymousCredentials(), - ) + client = DatasetServiceAsyncClient(credentials=credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -2872,9 +2563,7 @@ async def test_get_annotation_spec_flattened_async(): ) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - response = await client.get_annotation_spec( - name="name_value", - ) + response = await client.get_annotation_spec(name="name_value",) # Establish that the underlying call was made with the expected # request object values. @@ -2886,16 +2575,13 @@ async def test_get_annotation_spec_flattened_async(): @pytest.mark.asyncio async def test_get_annotation_spec_flattened_error_async(): - client = DatasetServiceAsyncClient( - credentials=credentials.AnonymousCredentials(), - ) + client = DatasetServiceAsyncClient(credentials=credentials.AnonymousCredentials(),) # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): await client.get_annotation_spec( - dataset_service.GetAnnotationSpecRequest(), - name="name_value", + dataset_service.GetAnnotationSpecRequest(), name="name_value", ) @@ -2903,8 +2589,7 @@ def test_list_annotations( transport: str = "grpc", request_type=dataset_service.ListAnnotationsRequest ): client = DatasetServiceClient( - credentials=credentials.AnonymousCredentials(), - transport=transport, + credentials=credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -2942,8 +2627,7 @@ async def test_list_annotations_async( transport: str = "grpc_asyncio", request_type=dataset_service.ListAnnotationsRequest ): client = DatasetServiceAsyncClient( - credentials=credentials.AnonymousCredentials(), - transport=transport, + credentials=credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -2979,9 +2663,7 @@ async def test_list_annotations_async_from_dict(): def test_list_annotations_field_headers(): - client = DatasetServiceClient( - credentials=credentials.AnonymousCredentials(), - ) + client = DatasetServiceClient(credentials=credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. @@ -3001,17 +2683,12 @@ def test_list_annotations_field_headers(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "parent=parent/value", - ) in kw["metadata"] + assert ("x-goog-request-params", "parent=parent/value",) in kw["metadata"] @pytest.mark.asyncio async def test_list_annotations_field_headers_async(): - client = DatasetServiceAsyncClient( - credentials=credentials.AnonymousCredentials(), - ) + client = DatasetServiceAsyncClient(credentials=credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. @@ -3033,16 +2710,11 @@ async def test_list_annotations_field_headers_async(): # Establish that the field header was sent. 
_, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "parent=parent/value", - ) in kw["metadata"] + assert ("x-goog-request-params", "parent=parent/value",) in kw["metadata"] def test_list_annotations_flattened(): - client = DatasetServiceClient( - credentials=credentials.AnonymousCredentials(), - ) + client = DatasetServiceClient(credentials=credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.list_annotations), "__call__") as call: @@ -3051,9 +2723,7 @@ def test_list_annotations_flattened(): # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - client.list_annotations( - parent="parent_value", - ) + client.list_annotations(parent="parent_value",) # Establish that the underlying call was made with the expected # request object values. @@ -3064,24 +2734,19 @@ def test_list_annotations_flattened(): def test_list_annotations_flattened_error(): - client = DatasetServiceClient( - credentials=credentials.AnonymousCredentials(), - ) + client = DatasetServiceClient(credentials=credentials.AnonymousCredentials(),) # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): client.list_annotations( - dataset_service.ListAnnotationsRequest(), - parent="parent_value", + dataset_service.ListAnnotationsRequest(), parent="parent_value", ) @pytest.mark.asyncio async def test_list_annotations_flattened_async(): - client = DatasetServiceAsyncClient( - credentials=credentials.AnonymousCredentials(), - ) + client = DatasetServiceAsyncClient(credentials=credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.list_annotations), "__call__") as call: @@ -3093,9 +2758,7 @@ async def test_list_annotations_flattened_async(): ) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - response = await client.list_annotations( - parent="parent_value", - ) + response = await client.list_annotations(parent="parent_value",) # Establish that the underlying call was made with the expected # request object values. @@ -3107,23 +2770,18 @@ async def test_list_annotations_flattened_async(): @pytest.mark.asyncio async def test_list_annotations_flattened_error_async(): - client = DatasetServiceAsyncClient( - credentials=credentials.AnonymousCredentials(), - ) + client = DatasetServiceAsyncClient(credentials=credentials.AnonymousCredentials(),) # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): await client.list_annotations( - dataset_service.ListAnnotationsRequest(), - parent="parent_value", + dataset_service.ListAnnotationsRequest(), parent="parent_value", ) def test_list_annotations_pager(): - client = DatasetServiceClient( - credentials=credentials.AnonymousCredentials, - ) + client = DatasetServiceClient(credentials=credentials.AnonymousCredentials,) # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object(type(client.transport.list_annotations), "__call__") as call: @@ -3138,20 +2796,13 @@ def test_list_annotations_pager(): next_page_token="abc", ), dataset_service.ListAnnotationsResponse( - annotations=[], - next_page_token="def", + annotations=[], next_page_token="def", ), dataset_service.ListAnnotationsResponse( - annotations=[ - annotation.Annotation(), - ], - next_page_token="ghi", + annotations=[annotation.Annotation(),], next_page_token="ghi", ), dataset_service.ListAnnotationsResponse( - annotations=[ - annotation.Annotation(), - annotation.Annotation(), - ], + annotations=[annotation.Annotation(), annotation.Annotation(),], ), RuntimeError, ) @@ -3170,9 +2821,7 @@ def test_list_annotations_pager(): def test_list_annotations_pages(): - client = DatasetServiceClient( - credentials=credentials.AnonymousCredentials, - ) + client = DatasetServiceClient(credentials=credentials.AnonymousCredentials,) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.list_annotations), "__call__") as call: @@ -3187,20 +2836,13 @@ def test_list_annotations_pages(): next_page_token="abc", ), dataset_service.ListAnnotationsResponse( - annotations=[], - next_page_token="def", + annotations=[], next_page_token="def", ), dataset_service.ListAnnotationsResponse( - annotations=[ - annotation.Annotation(), - ], - next_page_token="ghi", + annotations=[annotation.Annotation(),], next_page_token="ghi", ), dataset_service.ListAnnotationsResponse( - annotations=[ - annotation.Annotation(), - annotation.Annotation(), - ], + annotations=[annotation.Annotation(), annotation.Annotation(),], ), RuntimeError, ) @@ -3211,9 +2853,7 @@ def test_list_annotations_pages(): @pytest.mark.asyncio async def test_list_annotations_async_pager(): - client = DatasetServiceAsyncClient( - credentials=credentials.AnonymousCredentials, - ) + client = DatasetServiceAsyncClient(credentials=credentials.AnonymousCredentials,) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -3230,26 +2870,17 @@ async def test_list_annotations_async_pager(): next_page_token="abc", ), dataset_service.ListAnnotationsResponse( - annotations=[], - next_page_token="def", + annotations=[], next_page_token="def", ), dataset_service.ListAnnotationsResponse( - annotations=[ - annotation.Annotation(), - ], - next_page_token="ghi", + annotations=[annotation.Annotation(),], next_page_token="ghi", ), dataset_service.ListAnnotationsResponse( - annotations=[ - annotation.Annotation(), - annotation.Annotation(), - ], + annotations=[annotation.Annotation(), annotation.Annotation(),], ), RuntimeError, ) - async_pager = await client.list_annotations( - request={}, - ) + async_pager = await client.list_annotations(request={},) assert async_pager.next_page_token == "abc" responses = [] async for response in async_pager: @@ -3261,9 +2892,7 @@ async def test_list_annotations_async_pager(): @pytest.mark.asyncio async def test_list_annotations_async_pages(): - client = DatasetServiceAsyncClient( - credentials=credentials.AnonymousCredentials, - ) + client = DatasetServiceAsyncClient(credentials=credentials.AnonymousCredentials,) # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object( @@ -3280,20 +2909,13 @@ async def test_list_annotations_async_pages(): next_page_token="abc", ), dataset_service.ListAnnotationsResponse( - annotations=[], - next_page_token="def", + annotations=[], next_page_token="def", ), dataset_service.ListAnnotationsResponse( - annotations=[ - annotation.Annotation(), - ], - next_page_token="ghi", + annotations=[annotation.Annotation(),], next_page_token="ghi", ), dataset_service.ListAnnotationsResponse( - annotations=[ - annotation.Annotation(), - annotation.Annotation(), - ], + annotations=[annotation.Annotation(), annotation.Annotation(),], ), RuntimeError, ) @@ -3311,8 +2933,7 @@ def test_credentials_transport_error(): ) with pytest.raises(ValueError): client = DatasetServiceClient( - credentials=credentials.AnonymousCredentials(), - transport=transport, + credentials=credentials.AnonymousCredentials(), transport=transport, ) # It is an error to provide a credentials file and a transport instance. @@ -3331,8 +2952,7 @@ def test_credentials_transport_error(): ) with pytest.raises(ValueError): client = DatasetServiceClient( - client_options={"scopes": ["1", "2"]}, - transport=transport, + client_options={"scopes": ["1", "2"]}, transport=transport, ) @@ -3377,13 +2997,8 @@ def test_transport_adc(transport_class): def test_transport_grpc_default(): # A client should use the gRPC transport by default. - client = DatasetServiceClient( - credentials=credentials.AnonymousCredentials(), - ) - assert isinstance( - client.transport, - transports.DatasetServiceGrpcTransport, - ) + client = DatasetServiceClient(credentials=credentials.AnonymousCredentials(),) + assert isinstance(client.transport, transports.DatasetServiceGrpcTransport,) def test_dataset_service_base_transport_error(): @@ -3439,8 +3054,7 @@ def test_dataset_service_base_transport_with_credentials_file(): Transport.return_value = None load_creds.return_value = (credentials.AnonymousCredentials(), None) transport = transports.DatasetServiceTransport( - credentials_file="credentials.json", - quota_project_id="octopus", + credentials_file="credentials.json", quota_project_id="octopus", ) load_creds.assert_called_once_with( "credentials.json", @@ -3510,8 +3124,7 @@ def test_dataset_service_grpc_transport_channel(): # Check that channel is used if provided. transport = transports.DatasetServiceGrpcTransport( - host="squid.clam.whelk", - channel=channel, + host="squid.clam.whelk", channel=channel, ) assert transport.grpc_channel == channel assert transport._host == "squid.clam.whelk:443" @@ -3523,8 +3136,7 @@ def test_dataset_service_grpc_asyncio_transport_channel(): # Check that channel is used if provided. transport = transports.DatasetServiceGrpcAsyncIOTransport( - host="squid.clam.whelk", - channel=channel, + host="squid.clam.whelk", channel=channel, ) assert transport.grpc_channel == channel assert transport._host == "squid.clam.whelk:443" @@ -3621,16 +3233,12 @@ def test_dataset_service_transport_channel_mtls_with_adc(transport_class): def test_dataset_service_grpc_lro_client(): client = DatasetServiceClient( - credentials=credentials.AnonymousCredentials(), - transport="grpc", + credentials=credentials.AnonymousCredentials(), transport="grpc", ) transport = client.transport # Ensure that we have a api-core operations client. - assert isinstance( - transport.operations_client, - operations_v1.OperationsClient, - ) + assert isinstance(transport.operations_client, operations_v1.OperationsClient,) # Ensure that subsequent calls to the property send the exact same object. 
assert transport.operations_client is transport.operations_client @@ -3638,16 +3246,12 @@ def test_dataset_service_grpc_lro_client(): def test_dataset_service_grpc_lro_async_client(): client = DatasetServiceAsyncClient( - credentials=credentials.AnonymousCredentials(), - transport="grpc_asyncio", + credentials=credentials.AnonymousCredentials(), transport="grpc_asyncio", ) transport = client.transport # Ensure that we have a api-core operations client. - assert isinstance( - transport.operations_client, - operations_v1.OperationsAsyncClient, - ) + assert isinstance(transport.operations_client, operations_v1.OperationsAsyncClient,) # Ensure that subsequent calls to the property send the exact same object. assert transport.operations_client is transport.operations_client @@ -3727,10 +3331,7 @@ def test_data_item_path(): data_item = "nautilus" expected = "projects/{project}/locations/{location}/datasets/{dataset}/dataItems/{data_item}".format( - project=project, - location=location, - dataset=dataset, - data_item=data_item, + project=project, location=location, dataset=dataset, data_item=data_item, ) actual = DatasetServiceClient.data_item_path(project, location, dataset, data_item) assert expected == actual @@ -3756,9 +3357,7 @@ def test_dataset_path(): dataset = "oyster" expected = "projects/{project}/locations/{location}/datasets/{dataset}".format( - project=project, - location=location, - dataset=dataset, + project=project, location=location, dataset=dataset, ) actual = DatasetServiceClient.dataset_path(project, location, dataset) assert expected == actual @@ -3801,9 +3400,7 @@ def test_parse_common_billing_account_path(): def test_common_folder_path(): folder = "scallop" - expected = "folders/{folder}".format( - folder=folder, - ) + expected = "folders/{folder}".format(folder=folder,) actual = DatasetServiceClient.common_folder_path(folder) assert expected == actual @@ -3822,9 +3419,7 @@ def test_parse_common_folder_path(): def test_common_organization_path(): organization = "squid" - expected = "organizations/{organization}".format( - organization=organization, - ) + expected = "organizations/{organization}".format(organization=organization,) actual = DatasetServiceClient.common_organization_path(organization) assert expected == actual @@ -3843,9 +3438,7 @@ def test_parse_common_organization_path(): def test_common_project_path(): project = "whelk" - expected = "projects/{project}".format( - project=project, - ) + expected = "projects/{project}".format(project=project,) actual = DatasetServiceClient.common_project_path(project) assert expected == actual @@ -3866,8 +3459,7 @@ def test_common_location_path(): location = "nudibranch" expected = "projects/{project}/locations/{location}".format( - project=project, - location=location, + project=project, location=location, ) actual = DatasetServiceClient.common_location_path(project, location) assert expected == actual @@ -3892,8 +3484,7 @@ def test_client_withDEFAULT_CLIENT_INFO(): transports.DatasetServiceTransport, "_prep_wrapped_messages" ) as prep: client = DatasetServiceClient( - credentials=credentials.AnonymousCredentials(), - client_info=client_info, + credentials=credentials.AnonymousCredentials(), client_info=client_info, ) prep.assert_called_once_with(client_info) @@ -3902,7 +3493,6 @@ def test_client_withDEFAULT_CLIENT_INFO(): ) as prep: transport_class = DatasetServiceClient.get_transport_class() transport = transport_class( - credentials=credentials.AnonymousCredentials(), - client_info=client_info, + 
credentials=credentials.AnonymousCredentials(), client_info=client_info, ) prep.assert_called_once_with(client_info) diff --git a/tests/unit/gapic/aiplatform_v1beta1/test_endpoint_service.py b/tests/unit/gapic/aiplatform_v1beta1/test_endpoint_service.py index 8994b2c8be..93c35a7a2a 100644 --- a/tests/unit/gapic/aiplatform_v1beta1/test_endpoint_service.py +++ b/tests/unit/gapic/aiplatform_v1beta1/test_endpoint_service.py @@ -401,9 +401,7 @@ def test_endpoint_service_client_client_options_scopes( client_class, transport_class, transport_name ): # Check the case scopes are provided. - options = client_options.ClientOptions( - scopes=["1", "2"], - ) + options = client_options.ClientOptions(scopes=["1", "2"],) with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None client = client_class(client_options=options) @@ -471,8 +469,7 @@ def test_create_endpoint( transport: str = "grpc", request_type=endpoint_service.CreateEndpointRequest ): client = EndpointServiceClient( - credentials=credentials.AnonymousCredentials(), - transport=transport, + credentials=credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -505,8 +502,7 @@ async def test_create_endpoint_async( transport: str = "grpc_asyncio", request_type=endpoint_service.CreateEndpointRequest ): client = EndpointServiceAsyncClient( - credentials=credentials.AnonymousCredentials(), - transport=transport, + credentials=credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -538,9 +534,7 @@ async def test_create_endpoint_async_from_dict(): def test_create_endpoint_field_headers(): - client = EndpointServiceClient( - credentials=credentials.AnonymousCredentials(), - ) + client = EndpointServiceClient(credentials=credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. @@ -560,17 +554,12 @@ def test_create_endpoint_field_headers(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "parent=parent/value", - ) in kw["metadata"] + assert ("x-goog-request-params", "parent=parent/value",) in kw["metadata"] @pytest.mark.asyncio async def test_create_endpoint_field_headers_async(): - client = EndpointServiceAsyncClient( - credentials=credentials.AnonymousCredentials(), - ) + client = EndpointServiceAsyncClient(credentials=credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. @@ -592,16 +581,11 @@ async def test_create_endpoint_field_headers_async(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "parent=parent/value", - ) in kw["metadata"] + assert ("x-goog-request-params", "parent=parent/value",) in kw["metadata"] def test_create_endpoint_flattened(): - client = EndpointServiceClient( - credentials=credentials.AnonymousCredentials(), - ) + client = EndpointServiceClient(credentials=credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.create_endpoint), "__call__") as call: @@ -611,8 +595,7 @@ def test_create_endpoint_flattened(): # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. 
client.create_endpoint( - parent="parent_value", - endpoint=gca_endpoint.Endpoint(name="name_value"), + parent="parent_value", endpoint=gca_endpoint.Endpoint(name="name_value"), ) # Establish that the underlying call was made with the expected @@ -626,9 +609,7 @@ def test_create_endpoint_flattened(): def test_create_endpoint_flattened_error(): - client = EndpointServiceClient( - credentials=credentials.AnonymousCredentials(), - ) + client = EndpointServiceClient(credentials=credentials.AnonymousCredentials(),) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -642,9 +623,7 @@ def test_create_endpoint_flattened_error(): @pytest.mark.asyncio async def test_create_endpoint_flattened_async(): - client = EndpointServiceAsyncClient( - credentials=credentials.AnonymousCredentials(), - ) + client = EndpointServiceAsyncClient(credentials=credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.create_endpoint), "__call__") as call: @@ -657,8 +636,7 @@ async def test_create_endpoint_flattened_async(): # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. response = await client.create_endpoint( - parent="parent_value", - endpoint=gca_endpoint.Endpoint(name="name_value"), + parent="parent_value", endpoint=gca_endpoint.Endpoint(name="name_value"), ) # Establish that the underlying call was made with the expected @@ -673,9 +651,7 @@ async def test_create_endpoint_flattened_async(): @pytest.mark.asyncio async def test_create_endpoint_flattened_error_async(): - client = EndpointServiceAsyncClient( - credentials=credentials.AnonymousCredentials(), - ) + client = EndpointServiceAsyncClient(credentials=credentials.AnonymousCredentials(),) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -691,8 +667,7 @@ def test_get_endpoint( transport: str = "grpc", request_type=endpoint_service.GetEndpointRequest ): client = EndpointServiceClient( - credentials=credentials.AnonymousCredentials(), - transport=transport, + credentials=credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -739,8 +714,7 @@ async def test_get_endpoint_async( transport: str = "grpc_asyncio", request_type=endpoint_service.GetEndpointRequest ): client = EndpointServiceAsyncClient( - credentials=credentials.AnonymousCredentials(), - transport=transport, + credentials=credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -785,9 +759,7 @@ async def test_get_endpoint_async_from_dict(): def test_get_endpoint_field_headers(): - client = EndpointServiceClient( - credentials=credentials.AnonymousCredentials(), - ) + client = EndpointServiceClient(credentials=credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. @@ -807,17 +779,12 @@ def test_get_endpoint_field_headers(): # Establish that the field header was sent. 
_, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "name=name/value", - ) in kw["metadata"] + assert ("x-goog-request-params", "name=name/value",) in kw["metadata"] @pytest.mark.asyncio async def test_get_endpoint_field_headers_async(): - client = EndpointServiceAsyncClient( - credentials=credentials.AnonymousCredentials(), - ) + client = EndpointServiceAsyncClient(credentials=credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. @@ -837,16 +804,11 @@ async def test_get_endpoint_field_headers_async(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "name=name/value", - ) in kw["metadata"] + assert ("x-goog-request-params", "name=name/value",) in kw["metadata"] def test_get_endpoint_flattened(): - client = EndpointServiceClient( - credentials=credentials.AnonymousCredentials(), - ) + client = EndpointServiceClient(credentials=credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.get_endpoint), "__call__") as call: @@ -855,9 +817,7 @@ def test_get_endpoint_flattened(): # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - client.get_endpoint( - name="name_value", - ) + client.get_endpoint(name="name_value",) # Establish that the underlying call was made with the expected # request object values. @@ -868,24 +828,19 @@ def test_get_endpoint_flattened(): def test_get_endpoint_flattened_error(): - client = EndpointServiceClient( - credentials=credentials.AnonymousCredentials(), - ) + client = EndpointServiceClient(credentials=credentials.AnonymousCredentials(),) # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): client.get_endpoint( - endpoint_service.GetEndpointRequest(), - name="name_value", + endpoint_service.GetEndpointRequest(), name="name_value", ) @pytest.mark.asyncio async def test_get_endpoint_flattened_async(): - client = EndpointServiceAsyncClient( - credentials=credentials.AnonymousCredentials(), - ) + client = EndpointServiceAsyncClient(credentials=credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.get_endpoint), "__call__") as call: @@ -895,9 +850,7 @@ async def test_get_endpoint_flattened_async(): call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(endpoint.Endpoint()) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - response = await client.get_endpoint( - name="name_value", - ) + response = await client.get_endpoint(name="name_value",) # Establish that the underlying call was made with the expected # request object values. @@ -909,16 +862,13 @@ async def test_get_endpoint_flattened_async(): @pytest.mark.asyncio async def test_get_endpoint_flattened_error_async(): - client = EndpointServiceAsyncClient( - credentials=credentials.AnonymousCredentials(), - ) + client = EndpointServiceAsyncClient(credentials=credentials.AnonymousCredentials(),) # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): await client.get_endpoint( - endpoint_service.GetEndpointRequest(), - name="name_value", + endpoint_service.GetEndpointRequest(), name="name_value", ) @@ -926,8 +876,7 @@ def test_list_endpoints( transport: str = "grpc", request_type=endpoint_service.ListEndpointsRequest ): client = EndpointServiceClient( - credentials=credentials.AnonymousCredentials(), - transport=transport, + credentials=credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -965,8 +914,7 @@ async def test_list_endpoints_async( transport: str = "grpc_asyncio", request_type=endpoint_service.ListEndpointsRequest ): client = EndpointServiceAsyncClient( - credentials=credentials.AnonymousCredentials(), - transport=transport, + credentials=credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -1002,9 +950,7 @@ async def test_list_endpoints_async_from_dict(): def test_list_endpoints_field_headers(): - client = EndpointServiceClient( - credentials=credentials.AnonymousCredentials(), - ) + client = EndpointServiceClient(credentials=credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. @@ -1024,17 +970,12 @@ def test_list_endpoints_field_headers(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "parent=parent/value", - ) in kw["metadata"] + assert ("x-goog-request-params", "parent=parent/value",) in kw["metadata"] @pytest.mark.asyncio async def test_list_endpoints_field_headers_async(): - client = EndpointServiceAsyncClient( - credentials=credentials.AnonymousCredentials(), - ) + client = EndpointServiceAsyncClient(credentials=credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. @@ -1056,16 +997,11 @@ async def test_list_endpoints_field_headers_async(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "parent=parent/value", - ) in kw["metadata"] + assert ("x-goog-request-params", "parent=parent/value",) in kw["metadata"] def test_list_endpoints_flattened(): - client = EndpointServiceClient( - credentials=credentials.AnonymousCredentials(), - ) + client = EndpointServiceClient(credentials=credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.list_endpoints), "__call__") as call: @@ -1074,9 +1010,7 @@ def test_list_endpoints_flattened(): # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - client.list_endpoints( - parent="parent_value", - ) + client.list_endpoints(parent="parent_value",) # Establish that the underlying call was made with the expected # request object values. @@ -1087,24 +1021,19 @@ def test_list_endpoints_flattened(): def test_list_endpoints_flattened_error(): - client = EndpointServiceClient( - credentials=credentials.AnonymousCredentials(), - ) + client = EndpointServiceClient(credentials=credentials.AnonymousCredentials(),) # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): client.list_endpoints( - endpoint_service.ListEndpointsRequest(), - parent="parent_value", + endpoint_service.ListEndpointsRequest(), parent="parent_value", ) @pytest.mark.asyncio async def test_list_endpoints_flattened_async(): - client = EndpointServiceAsyncClient( - credentials=credentials.AnonymousCredentials(), - ) + client = EndpointServiceAsyncClient(credentials=credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.list_endpoints), "__call__") as call: @@ -1116,9 +1045,7 @@ async def test_list_endpoints_flattened_async(): ) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - response = await client.list_endpoints( - parent="parent_value", - ) + response = await client.list_endpoints(parent="parent_value",) # Establish that the underlying call was made with the expected # request object values. @@ -1130,23 +1057,18 @@ async def test_list_endpoints_flattened_async(): @pytest.mark.asyncio async def test_list_endpoints_flattened_error_async(): - client = EndpointServiceAsyncClient( - credentials=credentials.AnonymousCredentials(), - ) + client = EndpointServiceAsyncClient(credentials=credentials.AnonymousCredentials(),) # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): await client.list_endpoints( - endpoint_service.ListEndpointsRequest(), - parent="parent_value", + endpoint_service.ListEndpointsRequest(), parent="parent_value", ) def test_list_endpoints_pager(): - client = EndpointServiceClient( - credentials=credentials.AnonymousCredentials, - ) + client = EndpointServiceClient(credentials=credentials.AnonymousCredentials,) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.list_endpoints), "__call__") as call: @@ -1161,20 +1083,13 @@ def test_list_endpoints_pager(): next_page_token="abc", ), endpoint_service.ListEndpointsResponse( - endpoints=[], - next_page_token="def", + endpoints=[], next_page_token="def", ), endpoint_service.ListEndpointsResponse( - endpoints=[ - endpoint.Endpoint(), - ], - next_page_token="ghi", + endpoints=[endpoint.Endpoint(),], next_page_token="ghi", ), endpoint_service.ListEndpointsResponse( - endpoints=[ - endpoint.Endpoint(), - endpoint.Endpoint(), - ], + endpoints=[endpoint.Endpoint(), endpoint.Endpoint(),], ), RuntimeError, ) @@ -1193,9 +1108,7 @@ def test_list_endpoints_pager(): def test_list_endpoints_pages(): - client = EndpointServiceClient( - credentials=credentials.AnonymousCredentials, - ) + client = EndpointServiceClient(credentials=credentials.AnonymousCredentials,) # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object(type(client.transport.list_endpoints), "__call__") as call: @@ -1210,20 +1123,13 @@ def test_list_endpoints_pages(): next_page_token="abc", ), endpoint_service.ListEndpointsResponse( - endpoints=[], - next_page_token="def", + endpoints=[], next_page_token="def", ), endpoint_service.ListEndpointsResponse( - endpoints=[ - endpoint.Endpoint(), - ], - next_page_token="ghi", + endpoints=[endpoint.Endpoint(),], next_page_token="ghi", ), endpoint_service.ListEndpointsResponse( - endpoints=[ - endpoint.Endpoint(), - endpoint.Endpoint(), - ], + endpoints=[endpoint.Endpoint(), endpoint.Endpoint(),], ), RuntimeError, ) @@ -1234,9 +1140,7 @@ def test_list_endpoints_pages(): @pytest.mark.asyncio async def test_list_endpoints_async_pager(): - client = EndpointServiceAsyncClient( - credentials=credentials.AnonymousCredentials, - ) + client = EndpointServiceAsyncClient(credentials=credentials.AnonymousCredentials,) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -1253,26 +1157,17 @@ async def test_list_endpoints_async_pager(): next_page_token="abc", ), endpoint_service.ListEndpointsResponse( - endpoints=[], - next_page_token="def", + endpoints=[], next_page_token="def", ), endpoint_service.ListEndpointsResponse( - endpoints=[ - endpoint.Endpoint(), - ], - next_page_token="ghi", + endpoints=[endpoint.Endpoint(),], next_page_token="ghi", ), endpoint_service.ListEndpointsResponse( - endpoints=[ - endpoint.Endpoint(), - endpoint.Endpoint(), - ], + endpoints=[endpoint.Endpoint(), endpoint.Endpoint(),], ), RuntimeError, ) - async_pager = await client.list_endpoints( - request={}, - ) + async_pager = await client.list_endpoints(request={},) assert async_pager.next_page_token == "abc" responses = [] async for response in async_pager: @@ -1284,9 +1179,7 @@ async def test_list_endpoints_async_pager(): @pytest.mark.asyncio async def test_list_endpoints_async_pages(): - client = EndpointServiceAsyncClient( - credentials=credentials.AnonymousCredentials, - ) + client = EndpointServiceAsyncClient(credentials=credentials.AnonymousCredentials,) # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object( @@ -1303,20 +1196,13 @@ async def test_list_endpoints_async_pages(): next_page_token="abc", ), endpoint_service.ListEndpointsResponse( - endpoints=[], - next_page_token="def", + endpoints=[], next_page_token="def", ), endpoint_service.ListEndpointsResponse( - endpoints=[ - endpoint.Endpoint(), - ], - next_page_token="ghi", + endpoints=[endpoint.Endpoint(),], next_page_token="ghi", ), endpoint_service.ListEndpointsResponse( - endpoints=[ - endpoint.Endpoint(), - endpoint.Endpoint(), - ], + endpoints=[endpoint.Endpoint(), endpoint.Endpoint(),], ), RuntimeError, ) @@ -1331,8 +1217,7 @@ def test_update_endpoint( transport: str = "grpc", request_type=endpoint_service.UpdateEndpointRequest ): client = EndpointServiceClient( - credentials=credentials.AnonymousCredentials(), - transport=transport, + credentials=credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -1379,8 +1264,7 @@ async def test_update_endpoint_async( transport: str = "grpc_asyncio", request_type=endpoint_service.UpdateEndpointRequest ): client = EndpointServiceAsyncClient( - credentials=credentials.AnonymousCredentials(), - transport=transport, + credentials=credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -1425,9 +1309,7 @@ async def test_update_endpoint_async_from_dict(): def test_update_endpoint_field_headers(): - client = EndpointServiceClient( - credentials=credentials.AnonymousCredentials(), - ) + client = EndpointServiceClient(credentials=credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. @@ -1447,17 +1329,14 @@ def test_update_endpoint_field_headers(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "endpoint.name=endpoint.name/value", - ) in kw["metadata"] + assert ("x-goog-request-params", "endpoint.name=endpoint.name/value",) in kw[ + "metadata" + ] @pytest.mark.asyncio async def test_update_endpoint_field_headers_async(): - client = EndpointServiceAsyncClient( - credentials=credentials.AnonymousCredentials(), - ) + client = EndpointServiceAsyncClient(credentials=credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. @@ -1479,16 +1358,13 @@ async def test_update_endpoint_field_headers_async(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "endpoint.name=endpoint.name/value", - ) in kw["metadata"] + assert ("x-goog-request-params", "endpoint.name=endpoint.name/value",) in kw[ + "metadata" + ] def test_update_endpoint_flattened(): - client = EndpointServiceClient( - credentials=credentials.AnonymousCredentials(), - ) + client = EndpointServiceClient(credentials=credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.update_endpoint), "__call__") as call: @@ -1513,9 +1389,7 @@ def test_update_endpoint_flattened(): def test_update_endpoint_flattened_error(): - client = EndpointServiceClient( - credentials=credentials.AnonymousCredentials(), - ) + client = EndpointServiceClient(credentials=credentials.AnonymousCredentials(),) # Attempting to call a method with both a request object and flattened # fields is an error. 
@@ -1529,9 +1403,7 @@ def test_update_endpoint_flattened_error(): @pytest.mark.asyncio async def test_update_endpoint_flattened_async(): - client = EndpointServiceAsyncClient( - credentials=credentials.AnonymousCredentials(), - ) + client = EndpointServiceAsyncClient(credentials=credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.update_endpoint), "__call__") as call: @@ -1560,9 +1432,7 @@ async def test_update_endpoint_flattened_async(): @pytest.mark.asyncio async def test_update_endpoint_flattened_error_async(): - client = EndpointServiceAsyncClient( - credentials=credentials.AnonymousCredentials(), - ) + client = EndpointServiceAsyncClient(credentials=credentials.AnonymousCredentials(),) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -1578,8 +1448,7 @@ def test_delete_endpoint( transport: str = "grpc", request_type=endpoint_service.DeleteEndpointRequest ): client = EndpointServiceClient( - credentials=credentials.AnonymousCredentials(), - transport=transport, + credentials=credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -1612,8 +1481,7 @@ async def test_delete_endpoint_async( transport: str = "grpc_asyncio", request_type=endpoint_service.DeleteEndpointRequest ): client = EndpointServiceAsyncClient( - credentials=credentials.AnonymousCredentials(), - transport=transport, + credentials=credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -1645,9 +1513,7 @@ async def test_delete_endpoint_async_from_dict(): def test_delete_endpoint_field_headers(): - client = EndpointServiceClient( - credentials=credentials.AnonymousCredentials(), - ) + client = EndpointServiceClient(credentials=credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. @@ -1667,17 +1533,12 @@ def test_delete_endpoint_field_headers(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "name=name/value", - ) in kw["metadata"] + assert ("x-goog-request-params", "name=name/value",) in kw["metadata"] @pytest.mark.asyncio async def test_delete_endpoint_field_headers_async(): - client = EndpointServiceAsyncClient( - credentials=credentials.AnonymousCredentials(), - ) + client = EndpointServiceAsyncClient(credentials=credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. @@ -1699,16 +1560,11 @@ async def test_delete_endpoint_field_headers_async(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "name=name/value", - ) in kw["metadata"] + assert ("x-goog-request-params", "name=name/value",) in kw["metadata"] def test_delete_endpoint_flattened(): - client = EndpointServiceClient( - credentials=credentials.AnonymousCredentials(), - ) + client = EndpointServiceClient(credentials=credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object(type(client.transport.delete_endpoint), "__call__") as call: @@ -1717,9 +1573,7 @@ def test_delete_endpoint_flattened(): # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - client.delete_endpoint( - name="name_value", - ) + client.delete_endpoint(name="name_value",) # Establish that the underlying call was made with the expected # request object values. @@ -1730,24 +1584,19 @@ def test_delete_endpoint_flattened(): def test_delete_endpoint_flattened_error(): - client = EndpointServiceClient( - credentials=credentials.AnonymousCredentials(), - ) + client = EndpointServiceClient(credentials=credentials.AnonymousCredentials(),) # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): client.delete_endpoint( - endpoint_service.DeleteEndpointRequest(), - name="name_value", + endpoint_service.DeleteEndpointRequest(), name="name_value", ) @pytest.mark.asyncio async def test_delete_endpoint_flattened_async(): - client = EndpointServiceAsyncClient( - credentials=credentials.AnonymousCredentials(), - ) + client = EndpointServiceAsyncClient(credentials=credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.delete_endpoint), "__call__") as call: @@ -1759,9 +1608,7 @@ async def test_delete_endpoint_flattened_async(): ) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - response = await client.delete_endpoint( - name="name_value", - ) + response = await client.delete_endpoint(name="name_value",) # Establish that the underlying call was made with the expected # request object values. @@ -1773,16 +1620,13 @@ async def test_delete_endpoint_flattened_async(): @pytest.mark.asyncio async def test_delete_endpoint_flattened_error_async(): - client = EndpointServiceAsyncClient( - credentials=credentials.AnonymousCredentials(), - ) + client = EndpointServiceAsyncClient(credentials=credentials.AnonymousCredentials(),) # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): await client.delete_endpoint( - endpoint_service.DeleteEndpointRequest(), - name="name_value", + endpoint_service.DeleteEndpointRequest(), name="name_value", ) @@ -1790,8 +1634,7 @@ def test_deploy_model( transport: str = "grpc", request_type=endpoint_service.DeployModelRequest ): client = EndpointServiceClient( - credentials=credentials.AnonymousCredentials(), - transport=transport, + credentials=credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -1824,8 +1667,7 @@ async def test_deploy_model_async( transport: str = "grpc_asyncio", request_type=endpoint_service.DeployModelRequest ): client = EndpointServiceAsyncClient( - credentials=credentials.AnonymousCredentials(), - transport=transport, + credentials=credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -1857,9 +1699,7 @@ async def test_deploy_model_async_from_dict(): def test_deploy_model_field_headers(): - client = EndpointServiceClient( - credentials=credentials.AnonymousCredentials(), - ) + client = EndpointServiceClient(credentials=credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. 
Set these to a non-empty value. @@ -1879,17 +1719,12 @@ def test_deploy_model_field_headers(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "endpoint=endpoint/value", - ) in kw["metadata"] + assert ("x-goog-request-params", "endpoint=endpoint/value",) in kw["metadata"] @pytest.mark.asyncio async def test_deploy_model_field_headers_async(): - client = EndpointServiceAsyncClient( - credentials=credentials.AnonymousCredentials(), - ) + client = EndpointServiceAsyncClient(credentials=credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. @@ -1911,16 +1746,11 @@ async def test_deploy_model_field_headers_async(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "endpoint=endpoint/value", - ) in kw["metadata"] + assert ("x-goog-request-params", "endpoint=endpoint/value",) in kw["metadata"] def test_deploy_model_flattened(): - client = EndpointServiceClient( - credentials=credentials.AnonymousCredentials(), - ) + client = EndpointServiceClient(credentials=credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.deploy_model), "__call__") as call: @@ -1960,9 +1790,7 @@ def test_deploy_model_flattened(): def test_deploy_model_flattened_error(): - client = EndpointServiceClient( - credentials=credentials.AnonymousCredentials(), - ) + client = EndpointServiceClient(credentials=credentials.AnonymousCredentials(),) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -1983,9 +1811,7 @@ def test_deploy_model_flattened_error(): @pytest.mark.asyncio async def test_deploy_model_flattened_async(): - client = EndpointServiceAsyncClient( - credentials=credentials.AnonymousCredentials(), - ) + client = EndpointServiceAsyncClient(credentials=credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.deploy_model), "__call__") as call: @@ -2029,9 +1855,7 @@ async def test_deploy_model_flattened_async(): @pytest.mark.asyncio async def test_deploy_model_flattened_error_async(): - client = EndpointServiceAsyncClient( - credentials=credentials.AnonymousCredentials(), - ) + client = EndpointServiceAsyncClient(credentials=credentials.AnonymousCredentials(),) # Attempting to call a method with both a request object and flattened # fields is an error. 
@@ -2054,8 +1878,7 @@ def test_undeploy_model( transport: str = "grpc", request_type=endpoint_service.UndeployModelRequest ): client = EndpointServiceClient( - credentials=credentials.AnonymousCredentials(), - transport=transport, + credentials=credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -2088,8 +1911,7 @@ async def test_undeploy_model_async( transport: str = "grpc_asyncio", request_type=endpoint_service.UndeployModelRequest ): client = EndpointServiceAsyncClient( - credentials=credentials.AnonymousCredentials(), - transport=transport, + credentials=credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -2121,9 +1943,7 @@ async def test_undeploy_model_async_from_dict(): def test_undeploy_model_field_headers(): - client = EndpointServiceClient( - credentials=credentials.AnonymousCredentials(), - ) + client = EndpointServiceClient(credentials=credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. @@ -2143,17 +1963,12 @@ def test_undeploy_model_field_headers(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "endpoint=endpoint/value", - ) in kw["metadata"] + assert ("x-goog-request-params", "endpoint=endpoint/value",) in kw["metadata"] @pytest.mark.asyncio async def test_undeploy_model_field_headers_async(): - client = EndpointServiceAsyncClient( - credentials=credentials.AnonymousCredentials(), - ) + client = EndpointServiceAsyncClient(credentials=credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. @@ -2175,16 +1990,11 @@ async def test_undeploy_model_field_headers_async(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "endpoint=endpoint/value", - ) in kw["metadata"] + assert ("x-goog-request-params", "endpoint=endpoint/value",) in kw["metadata"] def test_undeploy_model_flattened(): - client = EndpointServiceClient( - credentials=credentials.AnonymousCredentials(), - ) + client = EndpointServiceClient(credentials=credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.undeploy_model), "__call__") as call: @@ -2212,9 +2022,7 @@ def test_undeploy_model_flattened(): def test_undeploy_model_flattened_error(): - client = EndpointServiceClient( - credentials=credentials.AnonymousCredentials(), - ) + client = EndpointServiceClient(credentials=credentials.AnonymousCredentials(),) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -2229,9 +2037,7 @@ def test_undeploy_model_flattened_error(): @pytest.mark.asyncio async def test_undeploy_model_flattened_async(): - client = EndpointServiceAsyncClient( - credentials=credentials.AnonymousCredentials(), - ) + client = EndpointServiceAsyncClient(credentials=credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object(type(client.transport.undeploy_model), "__call__") as call: @@ -2263,9 +2069,7 @@ async def test_undeploy_model_flattened_async(): @pytest.mark.asyncio async def test_undeploy_model_flattened_error_async(): - client = EndpointServiceAsyncClient( - credentials=credentials.AnonymousCredentials(), - ) + client = EndpointServiceAsyncClient(credentials=credentials.AnonymousCredentials(),) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -2285,8 +2089,7 @@ def test_credentials_transport_error(): ) with pytest.raises(ValueError): client = EndpointServiceClient( - credentials=credentials.AnonymousCredentials(), - transport=transport, + credentials=credentials.AnonymousCredentials(), transport=transport, ) # It is an error to provide a credentials file and a transport instance. @@ -2305,8 +2108,7 @@ def test_credentials_transport_error(): ) with pytest.raises(ValueError): client = EndpointServiceClient( - client_options={"scopes": ["1", "2"]}, - transport=transport, + client_options={"scopes": ["1", "2"]}, transport=transport, ) @@ -2351,13 +2153,8 @@ def test_transport_adc(transport_class): def test_transport_grpc_default(): # A client should use the gRPC transport by default. - client = EndpointServiceClient( - credentials=credentials.AnonymousCredentials(), - ) - assert isinstance( - client.transport, - transports.EndpointServiceGrpcTransport, - ) + client = EndpointServiceClient(credentials=credentials.AnonymousCredentials(),) + assert isinstance(client.transport, transports.EndpointServiceGrpcTransport,) def test_endpoint_service_base_transport_error(): @@ -2410,8 +2207,7 @@ def test_endpoint_service_base_transport_with_credentials_file(): Transport.return_value = None load_creds.return_value = (credentials.AnonymousCredentials(), None) transport = transports.EndpointServiceTransport( - credentials_file="credentials.json", - quota_project_id="octopus", + credentials_file="credentials.json", quota_project_id="octopus", ) load_creds.assert_called_once_with( "credentials.json", @@ -2481,8 +2277,7 @@ def test_endpoint_service_grpc_transport_channel(): # Check that channel is used if provided. transport = transports.EndpointServiceGrpcTransport( - host="squid.clam.whelk", - channel=channel, + host="squid.clam.whelk", channel=channel, ) assert transport.grpc_channel == channel assert transport._host == "squid.clam.whelk:443" @@ -2494,8 +2289,7 @@ def test_endpoint_service_grpc_asyncio_transport_channel(): # Check that channel is used if provided. transport = transports.EndpointServiceGrpcAsyncIOTransport( - host="squid.clam.whelk", - channel=channel, + host="squid.clam.whelk", channel=channel, ) assert transport.grpc_channel == channel assert transport._host == "squid.clam.whelk:443" @@ -2592,16 +2386,12 @@ def test_endpoint_service_transport_channel_mtls_with_adc(transport_class): def test_endpoint_service_grpc_lro_client(): client = EndpointServiceClient( - credentials=credentials.AnonymousCredentials(), - transport="grpc", + credentials=credentials.AnonymousCredentials(), transport="grpc", ) transport = client.transport # Ensure that we have a api-core operations client. - assert isinstance( - transport.operations_client, - operations_v1.OperationsClient, - ) + assert isinstance(transport.operations_client, operations_v1.OperationsClient,) # Ensure that subsequent calls to the property send the exact same object. 
assert transport.operations_client is transport.operations_client @@ -2609,16 +2399,12 @@ def test_endpoint_service_grpc_lro_client(): def test_endpoint_service_grpc_lro_async_client(): client = EndpointServiceAsyncClient( - credentials=credentials.AnonymousCredentials(), - transport="grpc_asyncio", + credentials=credentials.AnonymousCredentials(), transport="grpc_asyncio", ) transport = client.transport # Ensure that we have a api-core operations client. - assert isinstance( - transport.operations_client, - operations_v1.OperationsAsyncClient, - ) + assert isinstance(transport.operations_client, operations_v1.OperationsAsyncClient,) # Ensure that subsequent calls to the property send the exact same object. assert transport.operations_client is transport.operations_client @@ -2630,9 +2416,7 @@ def test_endpoint_path(): endpoint = "whelk" expected = "projects/{project}/locations/{location}/endpoints/{endpoint}".format( - project=project, - location=location, - endpoint=endpoint, + project=project, location=location, endpoint=endpoint, ) actual = EndpointServiceClient.endpoint_path(project, location, endpoint) assert expected == actual @@ -2657,9 +2441,7 @@ def test_model_path(): model = "winkle" expected = "projects/{project}/locations/{location}/models/{model}".format( - project=project, - location=location, - model=model, + project=project, location=location, model=model, ) actual = EndpointServiceClient.model_path(project, location, model) assert expected == actual @@ -2702,9 +2484,7 @@ def test_parse_common_billing_account_path(): def test_common_folder_path(): folder = "whelk" - expected = "folders/{folder}".format( - folder=folder, - ) + expected = "folders/{folder}".format(folder=folder,) actual = EndpointServiceClient.common_folder_path(folder) assert expected == actual @@ -2723,9 +2503,7 @@ def test_parse_common_folder_path(): def test_common_organization_path(): organization = "oyster" - expected = "organizations/{organization}".format( - organization=organization, - ) + expected = "organizations/{organization}".format(organization=organization,) actual = EndpointServiceClient.common_organization_path(organization) assert expected == actual @@ -2744,9 +2522,7 @@ def test_parse_common_organization_path(): def test_common_project_path(): project = "cuttlefish" - expected = "projects/{project}".format( - project=project, - ) + expected = "projects/{project}".format(project=project,) actual = EndpointServiceClient.common_project_path(project) assert expected == actual @@ -2767,8 +2543,7 @@ def test_common_location_path(): location = "nautilus" expected = "projects/{project}/locations/{location}".format( - project=project, - location=location, + project=project, location=location, ) actual = EndpointServiceClient.common_location_path(project, location) assert expected == actual @@ -2793,8 +2568,7 @@ def test_client_withDEFAULT_CLIENT_INFO(): transports.EndpointServiceTransport, "_prep_wrapped_messages" ) as prep: client = EndpointServiceClient( - credentials=credentials.AnonymousCredentials(), - client_info=client_info, + credentials=credentials.AnonymousCredentials(), client_info=client_info, ) prep.assert_called_once_with(client_info) @@ -2803,7 +2577,6 @@ def test_client_withDEFAULT_CLIENT_INFO(): ) as prep: transport_class = EndpointServiceClient.get_transport_class() transport = transport_class( - credentials=credentials.AnonymousCredentials(), - client_info=client_info, + credentials=credentials.AnonymousCredentials(), client_info=client_info, ) 
prep.assert_called_once_with(client_info) diff --git a/tests/unit/gapic/aiplatform_v1beta1/test_job_service.py b/tests/unit/gapic/aiplatform_v1beta1/test_job_service.py index f99ac1ce5d..f08d84bd2f 100644 --- a/tests/unit/gapic/aiplatform_v1beta1/test_job_service.py +++ b/tests/unit/gapic/aiplatform_v1beta1/test_job_service.py @@ -399,9 +399,7 @@ def test_job_service_client_client_options_scopes( client_class, transport_class, transport_name ): # Check the case scopes are provided. - options = client_options.ClientOptions( - scopes=["1", "2"], - ) + options = client_options.ClientOptions(scopes=["1", "2"],) with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None client = client_class(client_options=options) @@ -467,8 +465,7 @@ def test_create_custom_job( transport: str = "grpc", request_type=job_service.CreateCustomJobRequest ): client = JobServiceClient( - credentials=credentials.AnonymousCredentials(), - transport=transport, + credentials=credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -514,8 +511,7 @@ async def test_create_custom_job_async( transport: str = "grpc_asyncio", request_type=job_service.CreateCustomJobRequest ): client = JobServiceAsyncClient( - credentials=credentials.AnonymousCredentials(), - transport=transport, + credentials=credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -559,9 +555,7 @@ async def test_create_custom_job_async_from_dict(): def test_create_custom_job_field_headers(): - client = JobServiceClient( - credentials=credentials.AnonymousCredentials(), - ) + client = JobServiceClient(credentials=credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. @@ -583,17 +577,12 @@ def test_create_custom_job_field_headers(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "parent=parent/value", - ) in kw["metadata"] + assert ("x-goog-request-params", "parent=parent/value",) in kw["metadata"] @pytest.mark.asyncio async def test_create_custom_job_field_headers_async(): - client = JobServiceAsyncClient( - credentials=credentials.AnonymousCredentials(), - ) + client = JobServiceAsyncClient(credentials=credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. @@ -617,16 +606,11 @@ async def test_create_custom_job_field_headers_async(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "parent=parent/value", - ) in kw["metadata"] + assert ("x-goog-request-params", "parent=parent/value",) in kw["metadata"] def test_create_custom_job_flattened(): - client = JobServiceClient( - credentials=credentials.AnonymousCredentials(), - ) + client = JobServiceClient(credentials=credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -653,9 +637,7 @@ def test_create_custom_job_flattened(): def test_create_custom_job_flattened_error(): - client = JobServiceClient( - credentials=credentials.AnonymousCredentials(), - ) + client = JobServiceClient(credentials=credentials.AnonymousCredentials(),) # Attempting to call a method with both a request object and flattened # fields is an error. 
@@ -669,9 +651,7 @@ def test_create_custom_job_flattened_error(): @pytest.mark.asyncio async def test_create_custom_job_flattened_async(): - client = JobServiceAsyncClient( - credentials=credentials.AnonymousCredentials(), - ) + client = JobServiceAsyncClient(credentials=credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -702,9 +682,7 @@ async def test_create_custom_job_flattened_async(): @pytest.mark.asyncio async def test_create_custom_job_flattened_error_async(): - client = JobServiceAsyncClient( - credentials=credentials.AnonymousCredentials(), - ) + client = JobServiceAsyncClient(credentials=credentials.AnonymousCredentials(),) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -720,8 +698,7 @@ def test_get_custom_job( transport: str = "grpc", request_type=job_service.GetCustomJobRequest ): client = JobServiceClient( - credentials=credentials.AnonymousCredentials(), - transport=transport, + credentials=credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -765,8 +742,7 @@ async def test_get_custom_job_async( transport: str = "grpc_asyncio", request_type=job_service.GetCustomJobRequest ): client = JobServiceAsyncClient( - credentials=credentials.AnonymousCredentials(), - transport=transport, + credentials=credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -808,9 +784,7 @@ async def test_get_custom_job_async_from_dict(): def test_get_custom_job_field_headers(): - client = JobServiceClient( - credentials=credentials.AnonymousCredentials(), - ) + client = JobServiceClient(credentials=credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. @@ -830,17 +804,12 @@ def test_get_custom_job_field_headers(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "name=name/value", - ) in kw["metadata"] + assert ("x-goog-request-params", "name=name/value",) in kw["metadata"] @pytest.mark.asyncio async def test_get_custom_job_field_headers_async(): - client = JobServiceAsyncClient( - credentials=credentials.AnonymousCredentials(), - ) + client = JobServiceAsyncClient(credentials=credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. @@ -862,16 +831,11 @@ async def test_get_custom_job_field_headers_async(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "name=name/value", - ) in kw["metadata"] + assert ("x-goog-request-params", "name=name/value",) in kw["metadata"] def test_get_custom_job_flattened(): - client = JobServiceClient( - credentials=credentials.AnonymousCredentials(), - ) + client = JobServiceClient(credentials=credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.get_custom_job), "__call__") as call: @@ -880,9 +844,7 @@ def test_get_custom_job_flattened(): # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. 
- client.get_custom_job( - name="name_value", - ) + client.get_custom_job(name="name_value",) # Establish that the underlying call was made with the expected # request object values. @@ -893,24 +855,19 @@ def test_get_custom_job_flattened(): def test_get_custom_job_flattened_error(): - client = JobServiceClient( - credentials=credentials.AnonymousCredentials(), - ) + client = JobServiceClient(credentials=credentials.AnonymousCredentials(),) # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): client.get_custom_job( - job_service.GetCustomJobRequest(), - name="name_value", + job_service.GetCustomJobRequest(), name="name_value", ) @pytest.mark.asyncio async def test_get_custom_job_flattened_async(): - client = JobServiceAsyncClient( - credentials=credentials.AnonymousCredentials(), - ) + client = JobServiceAsyncClient(credentials=credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.get_custom_job), "__call__") as call: @@ -922,9 +879,7 @@ async def test_get_custom_job_flattened_async(): ) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - response = await client.get_custom_job( - name="name_value", - ) + response = await client.get_custom_job(name="name_value",) # Establish that the underlying call was made with the expected # request object values. @@ -936,16 +891,13 @@ async def test_get_custom_job_flattened_async(): @pytest.mark.asyncio async def test_get_custom_job_flattened_error_async(): - client = JobServiceAsyncClient( - credentials=credentials.AnonymousCredentials(), - ) + client = JobServiceAsyncClient(credentials=credentials.AnonymousCredentials(),) # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): await client.get_custom_job( - job_service.GetCustomJobRequest(), - name="name_value", + job_service.GetCustomJobRequest(), name="name_value", ) @@ -953,8 +905,7 @@ def test_list_custom_jobs( transport: str = "grpc", request_type=job_service.ListCustomJobsRequest ): client = JobServiceClient( - credentials=credentials.AnonymousCredentials(), - transport=transport, + credentials=credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -992,8 +943,7 @@ async def test_list_custom_jobs_async( transport: str = "grpc_asyncio", request_type=job_service.ListCustomJobsRequest ): client = JobServiceAsyncClient( - credentials=credentials.AnonymousCredentials(), - transport=transport, + credentials=credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -1004,9 +954,7 @@ async def test_list_custom_jobs_async( with mock.patch.object(type(client.transport.list_custom_jobs), "__call__") as call: # Designate an appropriate return value for the call. 
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - job_service.ListCustomJobsResponse( - next_page_token="next_page_token_value", - ) + job_service.ListCustomJobsResponse(next_page_token="next_page_token_value",) ) response = await client.list_custom_jobs(request) @@ -1029,9 +977,7 @@ async def test_list_custom_jobs_async_from_dict(): def test_list_custom_jobs_field_headers(): - client = JobServiceClient( - credentials=credentials.AnonymousCredentials(), - ) + client = JobServiceClient(credentials=credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. @@ -1051,17 +997,12 @@ def test_list_custom_jobs_field_headers(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "parent=parent/value", - ) in kw["metadata"] + assert ("x-goog-request-params", "parent=parent/value",) in kw["metadata"] @pytest.mark.asyncio async def test_list_custom_jobs_field_headers_async(): - client = JobServiceAsyncClient( - credentials=credentials.AnonymousCredentials(), - ) + client = JobServiceAsyncClient(credentials=credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. @@ -1083,16 +1024,11 @@ async def test_list_custom_jobs_field_headers_async(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "parent=parent/value", - ) in kw["metadata"] + assert ("x-goog-request-params", "parent=parent/value",) in kw["metadata"] def test_list_custom_jobs_flattened(): - client = JobServiceClient( - credentials=credentials.AnonymousCredentials(), - ) + client = JobServiceClient(credentials=credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.list_custom_jobs), "__call__") as call: @@ -1101,9 +1037,7 @@ def test_list_custom_jobs_flattened(): # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - client.list_custom_jobs( - parent="parent_value", - ) + client.list_custom_jobs(parent="parent_value",) # Establish that the underlying call was made with the expected # request object values. @@ -1114,24 +1048,19 @@ def test_list_custom_jobs_flattened(): def test_list_custom_jobs_flattened_error(): - client = JobServiceClient( - credentials=credentials.AnonymousCredentials(), - ) + client = JobServiceClient(credentials=credentials.AnonymousCredentials(),) # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): client.list_custom_jobs( - job_service.ListCustomJobsRequest(), - parent="parent_value", + job_service.ListCustomJobsRequest(), parent="parent_value", ) @pytest.mark.asyncio async def test_list_custom_jobs_flattened_async(): - client = JobServiceAsyncClient( - credentials=credentials.AnonymousCredentials(), - ) + client = JobServiceAsyncClient(credentials=credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.list_custom_jobs), "__call__") as call: @@ -1143,9 +1072,7 @@ async def test_list_custom_jobs_flattened_async(): ) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. 
- response = await client.list_custom_jobs( - parent="parent_value", - ) + response = await client.list_custom_jobs(parent="parent_value",) # Establish that the underlying call was made with the expected # request object values. @@ -1157,23 +1084,18 @@ async def test_list_custom_jobs_flattened_async(): @pytest.mark.asyncio async def test_list_custom_jobs_flattened_error_async(): - client = JobServiceAsyncClient( - credentials=credentials.AnonymousCredentials(), - ) + client = JobServiceAsyncClient(credentials=credentials.AnonymousCredentials(),) # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): await client.list_custom_jobs( - job_service.ListCustomJobsRequest(), - parent="parent_value", + job_service.ListCustomJobsRequest(), parent="parent_value", ) def test_list_custom_jobs_pager(): - client = JobServiceClient( - credentials=credentials.AnonymousCredentials, - ) + client = JobServiceClient(credentials=credentials.AnonymousCredentials,) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.list_custom_jobs), "__call__") as call: @@ -1187,21 +1109,12 @@ def test_list_custom_jobs_pager(): ], next_page_token="abc", ), + job_service.ListCustomJobsResponse(custom_jobs=[], next_page_token="def",), job_service.ListCustomJobsResponse( - custom_jobs=[], - next_page_token="def", - ), - job_service.ListCustomJobsResponse( - custom_jobs=[ - custom_job.CustomJob(), - ], - next_page_token="ghi", + custom_jobs=[custom_job.CustomJob(),], next_page_token="ghi", ), job_service.ListCustomJobsResponse( - custom_jobs=[ - custom_job.CustomJob(), - custom_job.CustomJob(), - ], + custom_jobs=[custom_job.CustomJob(), custom_job.CustomJob(),], ), RuntimeError, ) @@ -1220,9 +1133,7 @@ def test_list_custom_jobs_pager(): def test_list_custom_jobs_pages(): - client = JobServiceClient( - credentials=credentials.AnonymousCredentials, - ) + client = JobServiceClient(credentials=credentials.AnonymousCredentials,) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.list_custom_jobs), "__call__") as call: @@ -1236,21 +1147,12 @@ def test_list_custom_jobs_pages(): ], next_page_token="abc", ), + job_service.ListCustomJobsResponse(custom_jobs=[], next_page_token="def",), job_service.ListCustomJobsResponse( - custom_jobs=[], - next_page_token="def", + custom_jobs=[custom_job.CustomJob(),], next_page_token="ghi", ), job_service.ListCustomJobsResponse( - custom_jobs=[ - custom_job.CustomJob(), - ], - next_page_token="ghi", - ), - job_service.ListCustomJobsResponse( - custom_jobs=[ - custom_job.CustomJob(), - custom_job.CustomJob(), - ], + custom_jobs=[custom_job.CustomJob(), custom_job.CustomJob(),], ), RuntimeError, ) @@ -1261,9 +1163,7 @@ def test_list_custom_jobs_pages(): @pytest.mark.asyncio async def test_list_custom_jobs_async_pager(): - client = JobServiceAsyncClient( - credentials=credentials.AnonymousCredentials, - ) + client = JobServiceAsyncClient(credentials=credentials.AnonymousCredentials,) # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object( @@ -1279,27 +1179,16 @@ async def test_list_custom_jobs_async_pager(): ], next_page_token="abc", ), + job_service.ListCustomJobsResponse(custom_jobs=[], next_page_token="def",), job_service.ListCustomJobsResponse( - custom_jobs=[], - next_page_token="def", - ), - job_service.ListCustomJobsResponse( - custom_jobs=[ - custom_job.CustomJob(), - ], - next_page_token="ghi", + custom_jobs=[custom_job.CustomJob(),], next_page_token="ghi", ), job_service.ListCustomJobsResponse( - custom_jobs=[ - custom_job.CustomJob(), - custom_job.CustomJob(), - ], + custom_jobs=[custom_job.CustomJob(), custom_job.CustomJob(),], ), RuntimeError, ) - async_pager = await client.list_custom_jobs( - request={}, - ) + async_pager = await client.list_custom_jobs(request={},) assert async_pager.next_page_token == "abc" responses = [] async for response in async_pager: @@ -1311,9 +1200,7 @@ async def test_list_custom_jobs_async_pager(): @pytest.mark.asyncio async def test_list_custom_jobs_async_pages(): - client = JobServiceAsyncClient( - credentials=credentials.AnonymousCredentials, - ) + client = JobServiceAsyncClient(credentials=credentials.AnonymousCredentials,) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -1329,21 +1216,12 @@ async def test_list_custom_jobs_async_pages(): ], next_page_token="abc", ), + job_service.ListCustomJobsResponse(custom_jobs=[], next_page_token="def",), job_service.ListCustomJobsResponse( - custom_jobs=[], - next_page_token="def", + custom_jobs=[custom_job.CustomJob(),], next_page_token="ghi", ), job_service.ListCustomJobsResponse( - custom_jobs=[ - custom_job.CustomJob(), - ], - next_page_token="ghi", - ), - job_service.ListCustomJobsResponse( - custom_jobs=[ - custom_job.CustomJob(), - custom_job.CustomJob(), - ], + custom_jobs=[custom_job.CustomJob(), custom_job.CustomJob(),], ), RuntimeError, ) @@ -1358,8 +1236,7 @@ def test_delete_custom_job( transport: str = "grpc", request_type=job_service.DeleteCustomJobRequest ): client = JobServiceClient( - credentials=credentials.AnonymousCredentials(), - transport=transport, + credentials=credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -1394,8 +1271,7 @@ async def test_delete_custom_job_async( transport: str = "grpc_asyncio", request_type=job_service.DeleteCustomJobRequest ): client = JobServiceAsyncClient( - credentials=credentials.AnonymousCredentials(), - transport=transport, + credentials=credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -1429,9 +1305,7 @@ async def test_delete_custom_job_async_from_dict(): def test_delete_custom_job_field_headers(): - client = JobServiceClient( - credentials=credentials.AnonymousCredentials(), - ) + client = JobServiceClient(credentials=credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. @@ -1453,17 +1327,12 @@ def test_delete_custom_job_field_headers(): # Establish that the field header was sent. 
_, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "name=name/value", - ) in kw["metadata"] + assert ("x-goog-request-params", "name=name/value",) in kw["metadata"] @pytest.mark.asyncio async def test_delete_custom_job_field_headers_async(): - client = JobServiceAsyncClient( - credentials=credentials.AnonymousCredentials(), - ) + client = JobServiceAsyncClient(credentials=credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. @@ -1487,16 +1356,11 @@ async def test_delete_custom_job_field_headers_async(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "name=name/value", - ) in kw["metadata"] + assert ("x-goog-request-params", "name=name/value",) in kw["metadata"] def test_delete_custom_job_flattened(): - client = JobServiceClient( - credentials=credentials.AnonymousCredentials(), - ) + client = JobServiceClient(credentials=credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -1507,9 +1371,7 @@ def test_delete_custom_job_flattened(): # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - client.delete_custom_job( - name="name_value", - ) + client.delete_custom_job(name="name_value",) # Establish that the underlying call was made with the expected # request object values. @@ -1520,24 +1382,19 @@ def test_delete_custom_job_flattened(): def test_delete_custom_job_flattened_error(): - client = JobServiceClient( - credentials=credentials.AnonymousCredentials(), - ) + client = JobServiceClient(credentials=credentials.AnonymousCredentials(),) # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): client.delete_custom_job( - job_service.DeleteCustomJobRequest(), - name="name_value", + job_service.DeleteCustomJobRequest(), name="name_value", ) @pytest.mark.asyncio async def test_delete_custom_job_flattened_async(): - client = JobServiceAsyncClient( - credentials=credentials.AnonymousCredentials(), - ) + client = JobServiceAsyncClient(credentials=credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -1551,9 +1408,7 @@ async def test_delete_custom_job_flattened_async(): ) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - response = await client.delete_custom_job( - name="name_value", - ) + response = await client.delete_custom_job(name="name_value",) # Establish that the underlying call was made with the expected # request object values. @@ -1565,16 +1420,13 @@ async def test_delete_custom_job_flattened_async(): @pytest.mark.asyncio async def test_delete_custom_job_flattened_error_async(): - client = JobServiceAsyncClient( - credentials=credentials.AnonymousCredentials(), - ) + client = JobServiceAsyncClient(credentials=credentials.AnonymousCredentials(),) # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): await client.delete_custom_job( - job_service.DeleteCustomJobRequest(), - name="name_value", + job_service.DeleteCustomJobRequest(), name="name_value", ) @@ -1582,8 +1434,7 @@ def test_cancel_custom_job( transport: str = "grpc", request_type=job_service.CancelCustomJobRequest ): client = JobServiceClient( - credentials=credentials.AnonymousCredentials(), - transport=transport, + credentials=credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -1618,8 +1469,7 @@ async def test_cancel_custom_job_async( transport: str = "grpc_asyncio", request_type=job_service.CancelCustomJobRequest ): client = JobServiceAsyncClient( - credentials=credentials.AnonymousCredentials(), - transport=transport, + credentials=credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -1651,9 +1501,7 @@ async def test_cancel_custom_job_async_from_dict(): def test_cancel_custom_job_field_headers(): - client = JobServiceClient( - credentials=credentials.AnonymousCredentials(), - ) + client = JobServiceClient(credentials=credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. @@ -1675,17 +1523,12 @@ def test_cancel_custom_job_field_headers(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "name=name/value", - ) in kw["metadata"] + assert ("x-goog-request-params", "name=name/value",) in kw["metadata"] @pytest.mark.asyncio async def test_cancel_custom_job_field_headers_async(): - client = JobServiceAsyncClient( - credentials=credentials.AnonymousCredentials(), - ) + client = JobServiceAsyncClient(credentials=credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. @@ -1707,16 +1550,11 @@ async def test_cancel_custom_job_field_headers_async(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "name=name/value", - ) in kw["metadata"] + assert ("x-goog-request-params", "name=name/value",) in kw["metadata"] def test_cancel_custom_job_flattened(): - client = JobServiceClient( - credentials=credentials.AnonymousCredentials(), - ) + client = JobServiceClient(credentials=credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -1727,9 +1565,7 @@ def test_cancel_custom_job_flattened(): # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - client.cancel_custom_job( - name="name_value", - ) + client.cancel_custom_job(name="name_value",) # Establish that the underlying call was made with the expected # request object values. @@ -1740,24 +1576,19 @@ def test_cancel_custom_job_flattened(): def test_cancel_custom_job_flattened_error(): - client = JobServiceClient( - credentials=credentials.AnonymousCredentials(), - ) + client = JobServiceClient(credentials=credentials.AnonymousCredentials(),) # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): client.cancel_custom_job( - job_service.CancelCustomJobRequest(), - name="name_value", + job_service.CancelCustomJobRequest(), name="name_value", ) @pytest.mark.asyncio async def test_cancel_custom_job_flattened_async(): - client = JobServiceAsyncClient( - credentials=credentials.AnonymousCredentials(), - ) + client = JobServiceAsyncClient(credentials=credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -1769,9 +1600,7 @@ async def test_cancel_custom_job_flattened_async(): call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - response = await client.cancel_custom_job( - name="name_value", - ) + response = await client.cancel_custom_job(name="name_value",) # Establish that the underlying call was made with the expected # request object values. @@ -1783,16 +1612,13 @@ async def test_cancel_custom_job_flattened_async(): @pytest.mark.asyncio async def test_cancel_custom_job_flattened_error_async(): - client = JobServiceAsyncClient( - credentials=credentials.AnonymousCredentials(), - ) + client = JobServiceAsyncClient(credentials=credentials.AnonymousCredentials(),) # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): await client.cancel_custom_job( - job_service.CancelCustomJobRequest(), - name="name_value", + job_service.CancelCustomJobRequest(), name="name_value", ) @@ -1800,8 +1626,7 @@ def test_create_data_labeling_job( transport: str = "grpc", request_type=job_service.CreateDataLabelingJobRequest ): client = JobServiceClient( - credentials=credentials.AnonymousCredentials(), - transport=transport, + credentials=credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -1866,8 +1691,7 @@ async def test_create_data_labeling_job_async( request_type=job_service.CreateDataLabelingJobRequest, ): client = JobServiceAsyncClient( - credentials=credentials.AnonymousCredentials(), - transport=transport, + credentials=credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -1929,9 +1753,7 @@ async def test_create_data_labeling_job_async_from_dict(): def test_create_data_labeling_job_field_headers(): - client = JobServiceClient( - credentials=credentials.AnonymousCredentials(), - ) + client = JobServiceClient(credentials=credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. @@ -1953,17 +1775,12 @@ def test_create_data_labeling_job_field_headers(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "parent=parent/value", - ) in kw["metadata"] + assert ("x-goog-request-params", "parent=parent/value",) in kw["metadata"] @pytest.mark.asyncio async def test_create_data_labeling_job_field_headers_async(): - client = JobServiceAsyncClient( - credentials=credentials.AnonymousCredentials(), - ) + client = JobServiceAsyncClient(credentials=credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. 
@@ -1987,16 +1804,11 @@ async def test_create_data_labeling_job_field_headers_async(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "parent=parent/value", - ) in kw["metadata"] + assert ("x-goog-request-params", "parent=parent/value",) in kw["metadata"] def test_create_data_labeling_job_flattened(): - client = JobServiceClient( - credentials=credentials.AnonymousCredentials(), - ) + client = JobServiceClient(credentials=credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -2025,9 +1837,7 @@ def test_create_data_labeling_job_flattened(): def test_create_data_labeling_job_flattened_error(): - client = JobServiceClient( - credentials=credentials.AnonymousCredentials(), - ) + client = JobServiceClient(credentials=credentials.AnonymousCredentials(),) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -2041,9 +1851,7 @@ def test_create_data_labeling_job_flattened_error(): @pytest.mark.asyncio async def test_create_data_labeling_job_flattened_async(): - client = JobServiceAsyncClient( - credentials=credentials.AnonymousCredentials(), - ) + client = JobServiceAsyncClient(credentials=credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -2076,9 +1884,7 @@ async def test_create_data_labeling_job_flattened_async(): @pytest.mark.asyncio async def test_create_data_labeling_job_flattened_error_async(): - client = JobServiceAsyncClient( - credentials=credentials.AnonymousCredentials(), - ) + client = JobServiceAsyncClient(credentials=credentials.AnonymousCredentials(),) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -2094,8 +1900,7 @@ def test_get_data_labeling_job( transport: str = "grpc", request_type=job_service.GetDataLabelingJobRequest ): client = JobServiceClient( - credentials=credentials.AnonymousCredentials(), - transport=transport, + credentials=credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -2159,8 +1964,7 @@ async def test_get_data_labeling_job_async( transport: str = "grpc_asyncio", request_type=job_service.GetDataLabelingJobRequest ): client = JobServiceAsyncClient( - credentials=credentials.AnonymousCredentials(), - transport=transport, + credentials=credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -2222,9 +2026,7 @@ async def test_get_data_labeling_job_async_from_dict(): def test_get_data_labeling_job_field_headers(): - client = JobServiceClient( - credentials=credentials.AnonymousCredentials(), - ) + client = JobServiceClient(credentials=credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. @@ -2246,17 +2048,12 @@ def test_get_data_labeling_job_field_headers(): # Establish that the field header was sent. 
_, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "name=name/value", - ) in kw["metadata"] + assert ("x-goog-request-params", "name=name/value",) in kw["metadata"] @pytest.mark.asyncio async def test_get_data_labeling_job_field_headers_async(): - client = JobServiceAsyncClient( - credentials=credentials.AnonymousCredentials(), - ) + client = JobServiceAsyncClient(credentials=credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. @@ -2280,16 +2077,11 @@ async def test_get_data_labeling_job_field_headers_async(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "name=name/value", - ) in kw["metadata"] + assert ("x-goog-request-params", "name=name/value",) in kw["metadata"] def test_get_data_labeling_job_flattened(): - client = JobServiceClient( - credentials=credentials.AnonymousCredentials(), - ) + client = JobServiceClient(credentials=credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -2300,9 +2092,7 @@ def test_get_data_labeling_job_flattened(): # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - client.get_data_labeling_job( - name="name_value", - ) + client.get_data_labeling_job(name="name_value",) # Establish that the underlying call was made with the expected # request object values. @@ -2313,24 +2103,19 @@ def test_get_data_labeling_job_flattened(): def test_get_data_labeling_job_flattened_error(): - client = JobServiceClient( - credentials=credentials.AnonymousCredentials(), - ) + client = JobServiceClient(credentials=credentials.AnonymousCredentials(),) # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): client.get_data_labeling_job( - job_service.GetDataLabelingJobRequest(), - name="name_value", + job_service.GetDataLabelingJobRequest(), name="name_value", ) @pytest.mark.asyncio async def test_get_data_labeling_job_flattened_async(): - client = JobServiceAsyncClient( - credentials=credentials.AnonymousCredentials(), - ) + client = JobServiceAsyncClient(credentials=credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -2344,9 +2129,7 @@ async def test_get_data_labeling_job_flattened_async(): ) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - response = await client.get_data_labeling_job( - name="name_value", - ) + response = await client.get_data_labeling_job(name="name_value",) # Establish that the underlying call was made with the expected # request object values. @@ -2358,16 +2141,13 @@ async def test_get_data_labeling_job_flattened_async(): @pytest.mark.asyncio async def test_get_data_labeling_job_flattened_error_async(): - client = JobServiceAsyncClient( - credentials=credentials.AnonymousCredentials(), - ) + client = JobServiceAsyncClient(credentials=credentials.AnonymousCredentials(),) # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): await client.get_data_labeling_job( - job_service.GetDataLabelingJobRequest(), - name="name_value", + job_service.GetDataLabelingJobRequest(), name="name_value", ) @@ -2375,8 +2155,7 @@ def test_list_data_labeling_jobs( transport: str = "grpc", request_type=job_service.ListDataLabelingJobsRequest ): client = JobServiceClient( - credentials=credentials.AnonymousCredentials(), - transport=transport, + credentials=credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -2417,8 +2196,7 @@ async def test_list_data_labeling_jobs_async( request_type=job_service.ListDataLabelingJobsRequest, ): client = JobServiceAsyncClient( - credentials=credentials.AnonymousCredentials(), - transport=transport, + credentials=credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -2456,9 +2234,7 @@ async def test_list_data_labeling_jobs_async_from_dict(): def test_list_data_labeling_jobs_field_headers(): - client = JobServiceClient( - credentials=credentials.AnonymousCredentials(), - ) + client = JobServiceClient(credentials=credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. @@ -2480,17 +2256,12 @@ def test_list_data_labeling_jobs_field_headers(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "parent=parent/value", - ) in kw["metadata"] + assert ("x-goog-request-params", "parent=parent/value",) in kw["metadata"] @pytest.mark.asyncio async def test_list_data_labeling_jobs_field_headers_async(): - client = JobServiceAsyncClient( - credentials=credentials.AnonymousCredentials(), - ) + client = JobServiceAsyncClient(credentials=credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. @@ -2514,16 +2285,11 @@ async def test_list_data_labeling_jobs_field_headers_async(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "parent=parent/value", - ) in kw["metadata"] + assert ("x-goog-request-params", "parent=parent/value",) in kw["metadata"] def test_list_data_labeling_jobs_flattened(): - client = JobServiceClient( - credentials=credentials.AnonymousCredentials(), - ) + client = JobServiceClient(credentials=credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -2534,9 +2300,7 @@ def test_list_data_labeling_jobs_flattened(): # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - client.list_data_labeling_jobs( - parent="parent_value", - ) + client.list_data_labeling_jobs(parent="parent_value",) # Establish that the underlying call was made with the expected # request object values. @@ -2547,24 +2311,19 @@ def test_list_data_labeling_jobs_flattened(): def test_list_data_labeling_jobs_flattened_error(): - client = JobServiceClient( - credentials=credentials.AnonymousCredentials(), - ) + client = JobServiceClient(credentials=credentials.AnonymousCredentials(),) # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): client.list_data_labeling_jobs( - job_service.ListDataLabelingJobsRequest(), - parent="parent_value", + job_service.ListDataLabelingJobsRequest(), parent="parent_value", ) @pytest.mark.asyncio async def test_list_data_labeling_jobs_flattened_async(): - client = JobServiceAsyncClient( - credentials=credentials.AnonymousCredentials(), - ) + client = JobServiceAsyncClient(credentials=credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -2578,9 +2337,7 @@ async def test_list_data_labeling_jobs_flattened_async(): ) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - response = await client.list_data_labeling_jobs( - parent="parent_value", - ) + response = await client.list_data_labeling_jobs(parent="parent_value",) # Establish that the underlying call was made with the expected # request object values. @@ -2592,23 +2349,18 @@ async def test_list_data_labeling_jobs_flattened_async(): @pytest.mark.asyncio async def test_list_data_labeling_jobs_flattened_error_async(): - client = JobServiceAsyncClient( - credentials=credentials.AnonymousCredentials(), - ) + client = JobServiceAsyncClient(credentials=credentials.AnonymousCredentials(),) # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): await client.list_data_labeling_jobs( - job_service.ListDataLabelingJobsRequest(), - parent="parent_value", + job_service.ListDataLabelingJobsRequest(), parent="parent_value", ) def test_list_data_labeling_jobs_pager(): - client = JobServiceClient( - credentials=credentials.AnonymousCredentials, - ) + client = JobServiceClient(credentials=credentials.AnonymousCredentials,) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -2625,13 +2377,10 @@ def test_list_data_labeling_jobs_pager(): next_page_token="abc", ), job_service.ListDataLabelingJobsResponse( - data_labeling_jobs=[], - next_page_token="def", + data_labeling_jobs=[], next_page_token="def", ), job_service.ListDataLabelingJobsResponse( - data_labeling_jobs=[ - data_labeling_job.DataLabelingJob(), - ], + data_labeling_jobs=[data_labeling_job.DataLabelingJob(),], next_page_token="ghi", ), job_service.ListDataLabelingJobsResponse( @@ -2657,9 +2406,7 @@ def test_list_data_labeling_jobs_pager(): def test_list_data_labeling_jobs_pages(): - client = JobServiceClient( - credentials=credentials.AnonymousCredentials, - ) + client = JobServiceClient(credentials=credentials.AnonymousCredentials,) # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object( @@ -2676,13 +2423,10 @@ def test_list_data_labeling_jobs_pages(): next_page_token="abc", ), job_service.ListDataLabelingJobsResponse( - data_labeling_jobs=[], - next_page_token="def", + data_labeling_jobs=[], next_page_token="def", ), job_service.ListDataLabelingJobsResponse( - data_labeling_jobs=[ - data_labeling_job.DataLabelingJob(), - ], + data_labeling_jobs=[data_labeling_job.DataLabelingJob(),], next_page_token="ghi", ), job_service.ListDataLabelingJobsResponse( @@ -2700,9 +2444,7 @@ def test_list_data_labeling_jobs_pages(): @pytest.mark.asyncio async def test_list_data_labeling_jobs_async_pager(): - client = JobServiceAsyncClient( - credentials=credentials.AnonymousCredentials, - ) + client = JobServiceAsyncClient(credentials=credentials.AnonymousCredentials,) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -2721,13 +2463,10 @@ async def test_list_data_labeling_jobs_async_pager(): next_page_token="abc", ), job_service.ListDataLabelingJobsResponse( - data_labeling_jobs=[], - next_page_token="def", + data_labeling_jobs=[], next_page_token="def", ), job_service.ListDataLabelingJobsResponse( - data_labeling_jobs=[ - data_labeling_job.DataLabelingJob(), - ], + data_labeling_jobs=[data_labeling_job.DataLabelingJob(),], next_page_token="ghi", ), job_service.ListDataLabelingJobsResponse( @@ -2738,9 +2477,7 @@ async def test_list_data_labeling_jobs_async_pager(): ), RuntimeError, ) - async_pager = await client.list_data_labeling_jobs( - request={}, - ) + async_pager = await client.list_data_labeling_jobs(request={},) assert async_pager.next_page_token == "abc" responses = [] async for response in async_pager: @@ -2752,9 +2489,7 @@ async def test_list_data_labeling_jobs_async_pager(): @pytest.mark.asyncio async def test_list_data_labeling_jobs_async_pages(): - client = JobServiceAsyncClient( - credentials=credentials.AnonymousCredentials, - ) + client = JobServiceAsyncClient(credentials=credentials.AnonymousCredentials,) # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object( @@ -2773,13 +2508,10 @@ async def test_list_data_labeling_jobs_async_pages(): next_page_token="abc", ), job_service.ListDataLabelingJobsResponse( - data_labeling_jobs=[], - next_page_token="def", + data_labeling_jobs=[], next_page_token="def", ), job_service.ListDataLabelingJobsResponse( - data_labeling_jobs=[ - data_labeling_job.DataLabelingJob(), - ], + data_labeling_jobs=[data_labeling_job.DataLabelingJob(),], next_page_token="ghi", ), job_service.ListDataLabelingJobsResponse( @@ -2801,8 +2533,7 @@ def test_delete_data_labeling_job( transport: str = "grpc", request_type=job_service.DeleteDataLabelingJobRequest ): client = JobServiceClient( - credentials=credentials.AnonymousCredentials(), - transport=transport, + credentials=credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -2838,8 +2569,7 @@ async def test_delete_data_labeling_job_async( request_type=job_service.DeleteDataLabelingJobRequest, ): client = JobServiceAsyncClient( - credentials=credentials.AnonymousCredentials(), - transport=transport, + credentials=credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -2873,9 +2603,7 @@ async def test_delete_data_labeling_job_async_from_dict(): def test_delete_data_labeling_job_field_headers(): - client = JobServiceClient( - credentials=credentials.AnonymousCredentials(), - ) + client = JobServiceClient(credentials=credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. @@ -2897,17 +2625,12 @@ def test_delete_data_labeling_job_field_headers(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "name=name/value", - ) in kw["metadata"] + assert ("x-goog-request-params", "name=name/value",) in kw["metadata"] @pytest.mark.asyncio async def test_delete_data_labeling_job_field_headers_async(): - client = JobServiceAsyncClient( - credentials=credentials.AnonymousCredentials(), - ) + client = JobServiceAsyncClient(credentials=credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. @@ -2931,16 +2654,11 @@ async def test_delete_data_labeling_job_field_headers_async(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "name=name/value", - ) in kw["metadata"] + assert ("x-goog-request-params", "name=name/value",) in kw["metadata"] def test_delete_data_labeling_job_flattened(): - client = JobServiceClient( - credentials=credentials.AnonymousCredentials(), - ) + client = JobServiceClient(credentials=credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -2951,9 +2669,7 @@ def test_delete_data_labeling_job_flattened(): # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - client.delete_data_labeling_job( - name="name_value", - ) + client.delete_data_labeling_job(name="name_value",) # Establish that the underlying call was made with the expected # request object values. 
@@ -2964,24 +2680,19 @@ def test_delete_data_labeling_job_flattened(): def test_delete_data_labeling_job_flattened_error(): - client = JobServiceClient( - credentials=credentials.AnonymousCredentials(), - ) + client = JobServiceClient(credentials=credentials.AnonymousCredentials(),) # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): client.delete_data_labeling_job( - job_service.DeleteDataLabelingJobRequest(), - name="name_value", + job_service.DeleteDataLabelingJobRequest(), name="name_value", ) @pytest.mark.asyncio async def test_delete_data_labeling_job_flattened_async(): - client = JobServiceAsyncClient( - credentials=credentials.AnonymousCredentials(), - ) + client = JobServiceAsyncClient(credentials=credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -2995,9 +2706,7 @@ async def test_delete_data_labeling_job_flattened_async(): ) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - response = await client.delete_data_labeling_job( - name="name_value", - ) + response = await client.delete_data_labeling_job(name="name_value",) # Establish that the underlying call was made with the expected # request object values. @@ -3009,16 +2718,13 @@ async def test_delete_data_labeling_job_flattened_async(): @pytest.mark.asyncio async def test_delete_data_labeling_job_flattened_error_async(): - client = JobServiceAsyncClient( - credentials=credentials.AnonymousCredentials(), - ) + client = JobServiceAsyncClient(credentials=credentials.AnonymousCredentials(),) # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): await client.delete_data_labeling_job( - job_service.DeleteDataLabelingJobRequest(), - name="name_value", + job_service.DeleteDataLabelingJobRequest(), name="name_value", ) @@ -3026,8 +2732,7 @@ def test_cancel_data_labeling_job( transport: str = "grpc", request_type=job_service.CancelDataLabelingJobRequest ): client = JobServiceClient( - credentials=credentials.AnonymousCredentials(), - transport=transport, + credentials=credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -3063,8 +2768,7 @@ async def test_cancel_data_labeling_job_async( request_type=job_service.CancelDataLabelingJobRequest, ): client = JobServiceAsyncClient( - credentials=credentials.AnonymousCredentials(), - transport=transport, + credentials=credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -3096,9 +2800,7 @@ async def test_cancel_data_labeling_job_async_from_dict(): def test_cancel_data_labeling_job_field_headers(): - client = JobServiceClient( - credentials=credentials.AnonymousCredentials(), - ) + client = JobServiceClient(credentials=credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. @@ -3120,17 +2822,12 @@ def test_cancel_data_labeling_job_field_headers(): # Establish that the field header was sent. 
_, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "name=name/value", - ) in kw["metadata"] + assert ("x-goog-request-params", "name=name/value",) in kw["metadata"] @pytest.mark.asyncio async def test_cancel_data_labeling_job_field_headers_async(): - client = JobServiceAsyncClient( - credentials=credentials.AnonymousCredentials(), - ) + client = JobServiceAsyncClient(credentials=credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. @@ -3152,16 +2849,11 @@ async def test_cancel_data_labeling_job_field_headers_async(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "name=name/value", - ) in kw["metadata"] + assert ("x-goog-request-params", "name=name/value",) in kw["metadata"] def test_cancel_data_labeling_job_flattened(): - client = JobServiceClient( - credentials=credentials.AnonymousCredentials(), - ) + client = JobServiceClient(credentials=credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -3172,9 +2864,7 @@ def test_cancel_data_labeling_job_flattened(): # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - client.cancel_data_labeling_job( - name="name_value", - ) + client.cancel_data_labeling_job(name="name_value",) # Establish that the underlying call was made with the expected # request object values. @@ -3185,24 +2875,19 @@ def test_cancel_data_labeling_job_flattened(): def test_cancel_data_labeling_job_flattened_error(): - client = JobServiceClient( - credentials=credentials.AnonymousCredentials(), - ) + client = JobServiceClient(credentials=credentials.AnonymousCredentials(),) # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): client.cancel_data_labeling_job( - job_service.CancelDataLabelingJobRequest(), - name="name_value", + job_service.CancelDataLabelingJobRequest(), name="name_value", ) @pytest.mark.asyncio async def test_cancel_data_labeling_job_flattened_async(): - client = JobServiceAsyncClient( - credentials=credentials.AnonymousCredentials(), - ) + client = JobServiceAsyncClient(credentials=credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -3214,9 +2899,7 @@ async def test_cancel_data_labeling_job_flattened_async(): call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - response = await client.cancel_data_labeling_job( - name="name_value", - ) + response = await client.cancel_data_labeling_job(name="name_value",) # Establish that the underlying call was made with the expected # request object values. @@ -3228,16 +2911,13 @@ async def test_cancel_data_labeling_job_flattened_async(): @pytest.mark.asyncio async def test_cancel_data_labeling_job_flattened_error_async(): - client = JobServiceAsyncClient( - credentials=credentials.AnonymousCredentials(), - ) + client = JobServiceAsyncClient(credentials=credentials.AnonymousCredentials(),) # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): await client.cancel_data_labeling_job( - job_service.CancelDataLabelingJobRequest(), - name="name_value", + job_service.CancelDataLabelingJobRequest(), name="name_value", ) @@ -3246,8 +2926,7 @@ def test_create_hyperparameter_tuning_job( request_type=job_service.CreateHyperparameterTuningJobRequest, ): client = JobServiceClient( - credentials=credentials.AnonymousCredentials(), - transport=transport, + credentials=credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -3303,8 +2982,7 @@ async def test_create_hyperparameter_tuning_job_async( request_type=job_service.CreateHyperparameterTuningJobRequest, ): client = JobServiceAsyncClient( - credentials=credentials.AnonymousCredentials(), - transport=transport, + credentials=credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -3357,9 +3035,7 @@ async def test_create_hyperparameter_tuning_job_async_from_dict(): def test_create_hyperparameter_tuning_job_field_headers(): - client = JobServiceClient( - credentials=credentials.AnonymousCredentials(), - ) + client = JobServiceClient(credentials=credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. @@ -3381,17 +3057,12 @@ def test_create_hyperparameter_tuning_job_field_headers(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "parent=parent/value", - ) in kw["metadata"] + assert ("x-goog-request-params", "parent=parent/value",) in kw["metadata"] @pytest.mark.asyncio async def test_create_hyperparameter_tuning_job_field_headers_async(): - client = JobServiceAsyncClient( - credentials=credentials.AnonymousCredentials(), - ) + client = JobServiceAsyncClient(credentials=credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. @@ -3415,16 +3086,11 @@ async def test_create_hyperparameter_tuning_job_field_headers_async(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "parent=parent/value", - ) in kw["metadata"] + assert ("x-goog-request-params", "parent=parent/value",) in kw["metadata"] def test_create_hyperparameter_tuning_job_flattened(): - client = JobServiceClient( - credentials=credentials.AnonymousCredentials(), - ) + client = JobServiceClient(credentials=credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -3457,9 +3123,7 @@ def test_create_hyperparameter_tuning_job_flattened(): def test_create_hyperparameter_tuning_job_flattened_error(): - client = JobServiceClient( - credentials=credentials.AnonymousCredentials(), - ) + client = JobServiceClient(credentials=credentials.AnonymousCredentials(),) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -3475,9 +3139,7 @@ def test_create_hyperparameter_tuning_job_flattened_error(): @pytest.mark.asyncio async def test_create_hyperparameter_tuning_job_flattened_async(): - client = JobServiceAsyncClient( - credentials=credentials.AnonymousCredentials(), - ) + client = JobServiceAsyncClient(credentials=credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object( @@ -3514,9 +3176,7 @@ async def test_create_hyperparameter_tuning_job_flattened_async(): @pytest.mark.asyncio async def test_create_hyperparameter_tuning_job_flattened_error_async(): - client = JobServiceAsyncClient( - credentials=credentials.AnonymousCredentials(), - ) + client = JobServiceAsyncClient(credentials=credentials.AnonymousCredentials(),) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -3534,8 +3194,7 @@ def test_get_hyperparameter_tuning_job( transport: str = "grpc", request_type=job_service.GetHyperparameterTuningJobRequest ): client = JobServiceClient( - credentials=credentials.AnonymousCredentials(), - transport=transport, + credentials=credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -3591,8 +3250,7 @@ async def test_get_hyperparameter_tuning_job_async( request_type=job_service.GetHyperparameterTuningJobRequest, ): client = JobServiceAsyncClient( - credentials=credentials.AnonymousCredentials(), - transport=transport, + credentials=credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -3645,9 +3303,7 @@ async def test_get_hyperparameter_tuning_job_async_from_dict(): def test_get_hyperparameter_tuning_job_field_headers(): - client = JobServiceClient( - credentials=credentials.AnonymousCredentials(), - ) + client = JobServiceClient(credentials=credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. @@ -3669,17 +3325,12 @@ def test_get_hyperparameter_tuning_job_field_headers(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "name=name/value", - ) in kw["metadata"] + assert ("x-goog-request-params", "name=name/value",) in kw["metadata"] @pytest.mark.asyncio async def test_get_hyperparameter_tuning_job_field_headers_async(): - client = JobServiceAsyncClient( - credentials=credentials.AnonymousCredentials(), - ) + client = JobServiceAsyncClient(credentials=credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. @@ -3703,16 +3354,11 @@ async def test_get_hyperparameter_tuning_job_field_headers_async(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "name=name/value", - ) in kw["metadata"] + assert ("x-goog-request-params", "name=name/value",) in kw["metadata"] def test_get_hyperparameter_tuning_job_flattened(): - client = JobServiceClient( - credentials=credentials.AnonymousCredentials(), - ) + client = JobServiceClient(credentials=credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -3723,9 +3369,7 @@ def test_get_hyperparameter_tuning_job_flattened(): # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - client.get_hyperparameter_tuning_job( - name="name_value", - ) + client.get_hyperparameter_tuning_job(name="name_value",) # Establish that the underlying call was made with the expected # request object values. 
@@ -3736,24 +3380,19 @@ def test_get_hyperparameter_tuning_job_flattened(): def test_get_hyperparameter_tuning_job_flattened_error(): - client = JobServiceClient( - credentials=credentials.AnonymousCredentials(), - ) + client = JobServiceClient(credentials=credentials.AnonymousCredentials(),) # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): client.get_hyperparameter_tuning_job( - job_service.GetHyperparameterTuningJobRequest(), - name="name_value", + job_service.GetHyperparameterTuningJobRequest(), name="name_value", ) @pytest.mark.asyncio async def test_get_hyperparameter_tuning_job_flattened_async(): - client = JobServiceAsyncClient( - credentials=credentials.AnonymousCredentials(), - ) + client = JobServiceAsyncClient(credentials=credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -3767,9 +3406,7 @@ async def test_get_hyperparameter_tuning_job_flattened_async(): ) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - response = await client.get_hyperparameter_tuning_job( - name="name_value", - ) + response = await client.get_hyperparameter_tuning_job(name="name_value",) # Establish that the underlying call was made with the expected # request object values. @@ -3781,16 +3418,13 @@ async def test_get_hyperparameter_tuning_job_flattened_async(): @pytest.mark.asyncio async def test_get_hyperparameter_tuning_job_flattened_error_async(): - client = JobServiceAsyncClient( - credentials=credentials.AnonymousCredentials(), - ) + client = JobServiceAsyncClient(credentials=credentials.AnonymousCredentials(),) # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): await client.get_hyperparameter_tuning_job( - job_service.GetHyperparameterTuningJobRequest(), - name="name_value", + job_service.GetHyperparameterTuningJobRequest(), name="name_value", ) @@ -3799,8 +3433,7 @@ def test_list_hyperparameter_tuning_jobs( request_type=job_service.ListHyperparameterTuningJobsRequest, ): client = JobServiceClient( - credentials=credentials.AnonymousCredentials(), - transport=transport, + credentials=credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -3841,8 +3474,7 @@ async def test_list_hyperparameter_tuning_jobs_async( request_type=job_service.ListHyperparameterTuningJobsRequest, ): client = JobServiceAsyncClient( - credentials=credentials.AnonymousCredentials(), - transport=transport, + credentials=credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -3880,9 +3512,7 @@ async def test_list_hyperparameter_tuning_jobs_async_from_dict(): def test_list_hyperparameter_tuning_jobs_field_headers(): - client = JobServiceClient( - credentials=credentials.AnonymousCredentials(), - ) + client = JobServiceClient(credentials=credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. @@ -3904,17 +3534,12 @@ def test_list_hyperparameter_tuning_jobs_field_headers(): # Establish that the field header was sent. 
_, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "parent=parent/value", - ) in kw["metadata"] + assert ("x-goog-request-params", "parent=parent/value",) in kw["metadata"] @pytest.mark.asyncio async def test_list_hyperparameter_tuning_jobs_field_headers_async(): - client = JobServiceAsyncClient( - credentials=credentials.AnonymousCredentials(), - ) + client = JobServiceAsyncClient(credentials=credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. @@ -3938,16 +3563,11 @@ async def test_list_hyperparameter_tuning_jobs_field_headers_async(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "parent=parent/value", - ) in kw["metadata"] + assert ("x-goog-request-params", "parent=parent/value",) in kw["metadata"] def test_list_hyperparameter_tuning_jobs_flattened(): - client = JobServiceClient( - credentials=credentials.AnonymousCredentials(), - ) + client = JobServiceClient(credentials=credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -3958,9 +3578,7 @@ def test_list_hyperparameter_tuning_jobs_flattened(): # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - client.list_hyperparameter_tuning_jobs( - parent="parent_value", - ) + client.list_hyperparameter_tuning_jobs(parent="parent_value",) # Establish that the underlying call was made with the expected # request object values. @@ -3971,24 +3589,19 @@ def test_list_hyperparameter_tuning_jobs_flattened(): def test_list_hyperparameter_tuning_jobs_flattened_error(): - client = JobServiceClient( - credentials=credentials.AnonymousCredentials(), - ) + client = JobServiceClient(credentials=credentials.AnonymousCredentials(),) # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): client.list_hyperparameter_tuning_jobs( - job_service.ListHyperparameterTuningJobsRequest(), - parent="parent_value", + job_service.ListHyperparameterTuningJobsRequest(), parent="parent_value", ) @pytest.mark.asyncio async def test_list_hyperparameter_tuning_jobs_flattened_async(): - client = JobServiceAsyncClient( - credentials=credentials.AnonymousCredentials(), - ) + client = JobServiceAsyncClient(credentials=credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -4002,9 +3615,7 @@ async def test_list_hyperparameter_tuning_jobs_flattened_async(): ) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - response = await client.list_hyperparameter_tuning_jobs( - parent="parent_value", - ) + response = await client.list_hyperparameter_tuning_jobs(parent="parent_value",) # Establish that the underlying call was made with the expected # request object values. @@ -4016,23 +3627,18 @@ async def test_list_hyperparameter_tuning_jobs_flattened_async(): @pytest.mark.asyncio async def test_list_hyperparameter_tuning_jobs_flattened_error_async(): - client = JobServiceAsyncClient( - credentials=credentials.AnonymousCredentials(), - ) + client = JobServiceAsyncClient(credentials=credentials.AnonymousCredentials(),) # Attempting to call a method with both a request object and flattened # fields is an error. 
    with pytest.raises(ValueError):
        await client.list_hyperparameter_tuning_jobs(
-            job_service.ListHyperparameterTuningJobsRequest(),
-            parent="parent_value",
+            job_service.ListHyperparameterTuningJobsRequest(), parent="parent_value",
        )


def test_list_hyperparameter_tuning_jobs_pager():
-    client = JobServiceClient(
-        credentials=credentials.AnonymousCredentials,
-    )
+    client = JobServiceClient(credentials=credentials.AnonymousCredentials,)

    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
@@ -4049,8 +3655,7 @@ def test_list_hyperparameter_tuning_jobs_pager():
                next_page_token="abc",
            ),
            job_service.ListHyperparameterTuningJobsResponse(
-                hyperparameter_tuning_jobs=[],
-                next_page_token="def",
+                hyperparameter_tuning_jobs=[], next_page_token="def",
            ),
            job_service.ListHyperparameterTuningJobsResponse(
                hyperparameter_tuning_jobs=[
@@ -4084,9 +3689,7 @@ def test_list_hyperparameter_tuning_jobs_pager():


def test_list_hyperparameter_tuning_jobs_pages():
-    client = JobServiceClient(
-        credentials=credentials.AnonymousCredentials,
-    )
+    client = JobServiceClient(credentials=credentials.AnonymousCredentials,)

    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
@@ -4103,8 +3706,7 @@ def test_list_hyperparameter_tuning_jobs_pages():
                next_page_token="abc",
            ),
            job_service.ListHyperparameterTuningJobsResponse(
-                hyperparameter_tuning_jobs=[],
-                next_page_token="def",
+                hyperparameter_tuning_jobs=[], next_page_token="def",
            ),
            job_service.ListHyperparameterTuningJobsResponse(
                hyperparameter_tuning_jobs=[
@@ -4127,9 +3729,7 @@ def test_list_hyperparameter_tuning_jobs_pages():


@pytest.mark.asyncio
async def test_list_hyperparameter_tuning_jobs_async_pager():
-    client = JobServiceAsyncClient(
-        credentials=credentials.AnonymousCredentials,
-    )
+    client = JobServiceAsyncClient(credentials=credentials.AnonymousCredentials,)

    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
@@ -4148,8 +3748,7 @@ async def test_list_hyperparameter_tuning_jobs_async_pager():
                next_page_token="abc",
            ),
            job_service.ListHyperparameterTuningJobsResponse(
-                hyperparameter_tuning_jobs=[],
-                next_page_token="def",
+                hyperparameter_tuning_jobs=[], next_page_token="def",
            ),
            job_service.ListHyperparameterTuningJobsResponse(
                hyperparameter_tuning_jobs=[
@@ -4165,9 +3764,7 @@ async def test_list_hyperparameter_tuning_jobs_async_pager():
            ),
            RuntimeError,
        )
-        async_pager = await client.list_hyperparameter_tuning_jobs(
-            request={},
-        )
+        async_pager = await client.list_hyperparameter_tuning_jobs(request={},)
        assert async_pager.next_page_token == "abc"
        responses = []
        async for response in async_pager:
@@ -4182,9 +3779,7 @@ async def test_list_hyperparameter_tuning_jobs_async_pager():


@pytest.mark.asyncio
async def test_list_hyperparameter_tuning_jobs_async_pages():
-    client = JobServiceAsyncClient(
-        credentials=credentials.AnonymousCredentials,
-    )
+    client = JobServiceAsyncClient(credentials=credentials.AnonymousCredentials,)

    # Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object( @@ -4203,8 +3798,7 @@ async def test_list_hyperparameter_tuning_jobs_async_pages(): next_page_token="abc", ), job_service.ListHyperparameterTuningJobsResponse( - hyperparameter_tuning_jobs=[], - next_page_token="def", + hyperparameter_tuning_jobs=[], next_page_token="def", ), job_service.ListHyperparameterTuningJobsResponse( hyperparameter_tuning_jobs=[ @@ -4234,8 +3828,7 @@ def test_delete_hyperparameter_tuning_job( request_type=job_service.DeleteHyperparameterTuningJobRequest, ): client = JobServiceClient( - credentials=credentials.AnonymousCredentials(), - transport=transport, + credentials=credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -4271,8 +3864,7 @@ async def test_delete_hyperparameter_tuning_job_async( request_type=job_service.DeleteHyperparameterTuningJobRequest, ): client = JobServiceAsyncClient( - credentials=credentials.AnonymousCredentials(), - transport=transport, + credentials=credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -4306,9 +3898,7 @@ async def test_delete_hyperparameter_tuning_job_async_from_dict(): def test_delete_hyperparameter_tuning_job_field_headers(): - client = JobServiceClient( - credentials=credentials.AnonymousCredentials(), - ) + client = JobServiceClient(credentials=credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. @@ -4330,17 +3920,12 @@ def test_delete_hyperparameter_tuning_job_field_headers(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "name=name/value", - ) in kw["metadata"] + assert ("x-goog-request-params", "name=name/value",) in kw["metadata"] @pytest.mark.asyncio async def test_delete_hyperparameter_tuning_job_field_headers_async(): - client = JobServiceAsyncClient( - credentials=credentials.AnonymousCredentials(), - ) + client = JobServiceAsyncClient(credentials=credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. @@ -4364,16 +3949,11 @@ async def test_delete_hyperparameter_tuning_job_field_headers_async(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "name=name/value", - ) in kw["metadata"] + assert ("x-goog-request-params", "name=name/value",) in kw["metadata"] def test_delete_hyperparameter_tuning_job_flattened(): - client = JobServiceClient( - credentials=credentials.AnonymousCredentials(), - ) + client = JobServiceClient(credentials=credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -4384,9 +3964,7 @@ def test_delete_hyperparameter_tuning_job_flattened(): # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - client.delete_hyperparameter_tuning_job( - name="name_value", - ) + client.delete_hyperparameter_tuning_job(name="name_value",) # Establish that the underlying call was made with the expected # request object values. 
@@ -4397,24 +3975,19 @@ def test_delete_hyperparameter_tuning_job_flattened(): def test_delete_hyperparameter_tuning_job_flattened_error(): - client = JobServiceClient( - credentials=credentials.AnonymousCredentials(), - ) + client = JobServiceClient(credentials=credentials.AnonymousCredentials(),) # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): client.delete_hyperparameter_tuning_job( - job_service.DeleteHyperparameterTuningJobRequest(), - name="name_value", + job_service.DeleteHyperparameterTuningJobRequest(), name="name_value", ) @pytest.mark.asyncio async def test_delete_hyperparameter_tuning_job_flattened_async(): - client = JobServiceAsyncClient( - credentials=credentials.AnonymousCredentials(), - ) + client = JobServiceAsyncClient(credentials=credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -4428,9 +4001,7 @@ async def test_delete_hyperparameter_tuning_job_flattened_async(): ) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - response = await client.delete_hyperparameter_tuning_job( - name="name_value", - ) + response = await client.delete_hyperparameter_tuning_job(name="name_value",) # Establish that the underlying call was made with the expected # request object values. @@ -4442,16 +4013,13 @@ async def test_delete_hyperparameter_tuning_job_flattened_async(): @pytest.mark.asyncio async def test_delete_hyperparameter_tuning_job_flattened_error_async(): - client = JobServiceAsyncClient( - credentials=credentials.AnonymousCredentials(), - ) + client = JobServiceAsyncClient(credentials=credentials.AnonymousCredentials(),) # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): await client.delete_hyperparameter_tuning_job( - job_service.DeleteHyperparameterTuningJobRequest(), - name="name_value", + job_service.DeleteHyperparameterTuningJobRequest(), name="name_value", ) @@ -4460,8 +4028,7 @@ def test_cancel_hyperparameter_tuning_job( request_type=job_service.CancelHyperparameterTuningJobRequest, ): client = JobServiceClient( - credentials=credentials.AnonymousCredentials(), - transport=transport, + credentials=credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -4497,8 +4064,7 @@ async def test_cancel_hyperparameter_tuning_job_async( request_type=job_service.CancelHyperparameterTuningJobRequest, ): client = JobServiceAsyncClient( - credentials=credentials.AnonymousCredentials(), - transport=transport, + credentials=credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -4530,9 +4096,7 @@ async def test_cancel_hyperparameter_tuning_job_async_from_dict(): def test_cancel_hyperparameter_tuning_job_field_headers(): - client = JobServiceClient( - credentials=credentials.AnonymousCredentials(), - ) + client = JobServiceClient(credentials=credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. @@ -4554,17 +4118,12 @@ def test_cancel_hyperparameter_tuning_job_field_headers(): # Establish that the field header was sent. 
_, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "name=name/value", - ) in kw["metadata"] + assert ("x-goog-request-params", "name=name/value",) in kw["metadata"] @pytest.mark.asyncio async def test_cancel_hyperparameter_tuning_job_field_headers_async(): - client = JobServiceAsyncClient( - credentials=credentials.AnonymousCredentials(), - ) + client = JobServiceAsyncClient(credentials=credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. @@ -4586,16 +4145,11 @@ async def test_cancel_hyperparameter_tuning_job_field_headers_async(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "name=name/value", - ) in kw["metadata"] + assert ("x-goog-request-params", "name=name/value",) in kw["metadata"] def test_cancel_hyperparameter_tuning_job_flattened(): - client = JobServiceClient( - credentials=credentials.AnonymousCredentials(), - ) + client = JobServiceClient(credentials=credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -4606,9 +4160,7 @@ def test_cancel_hyperparameter_tuning_job_flattened(): # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - client.cancel_hyperparameter_tuning_job( - name="name_value", - ) + client.cancel_hyperparameter_tuning_job(name="name_value",) # Establish that the underlying call was made with the expected # request object values. @@ -4619,24 +4171,19 @@ def test_cancel_hyperparameter_tuning_job_flattened(): def test_cancel_hyperparameter_tuning_job_flattened_error(): - client = JobServiceClient( - credentials=credentials.AnonymousCredentials(), - ) + client = JobServiceClient(credentials=credentials.AnonymousCredentials(),) # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): client.cancel_hyperparameter_tuning_job( - job_service.CancelHyperparameterTuningJobRequest(), - name="name_value", + job_service.CancelHyperparameterTuningJobRequest(), name="name_value", ) @pytest.mark.asyncio async def test_cancel_hyperparameter_tuning_job_flattened_async(): - client = JobServiceAsyncClient( - credentials=credentials.AnonymousCredentials(), - ) + client = JobServiceAsyncClient(credentials=credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -4648,9 +4195,7 @@ async def test_cancel_hyperparameter_tuning_job_flattened_async(): call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - response = await client.cancel_hyperparameter_tuning_job( - name="name_value", - ) + response = await client.cancel_hyperparameter_tuning_job(name="name_value",) # Establish that the underlying call was made with the expected # request object values. @@ -4662,16 +4207,13 @@ async def test_cancel_hyperparameter_tuning_job_flattened_async(): @pytest.mark.asyncio async def test_cancel_hyperparameter_tuning_job_flattened_error_async(): - client = JobServiceAsyncClient( - credentials=credentials.AnonymousCredentials(), - ) + client = JobServiceAsyncClient(credentials=credentials.AnonymousCredentials(),) # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): await client.cancel_hyperparameter_tuning_job( - job_service.CancelHyperparameterTuningJobRequest(), - name="name_value", + job_service.CancelHyperparameterTuningJobRequest(), name="name_value", ) @@ -4679,8 +4221,7 @@ def test_create_batch_prediction_job( transport: str = "grpc", request_type=job_service.CreateBatchPredictionJobRequest ): client = JobServiceClient( - credentials=credentials.AnonymousCredentials(), - transport=transport, + credentials=credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -4733,8 +4274,7 @@ async def test_create_batch_prediction_job_async( request_type=job_service.CreateBatchPredictionJobRequest, ): client = JobServiceAsyncClient( - credentials=credentials.AnonymousCredentials(), - transport=transport, + credentials=credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -4784,9 +4324,7 @@ async def test_create_batch_prediction_job_async_from_dict(): def test_create_batch_prediction_job_field_headers(): - client = JobServiceClient( - credentials=credentials.AnonymousCredentials(), - ) + client = JobServiceClient(credentials=credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. @@ -4808,17 +4346,12 @@ def test_create_batch_prediction_job_field_headers(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "parent=parent/value", - ) in kw["metadata"] + assert ("x-goog-request-params", "parent=parent/value",) in kw["metadata"] @pytest.mark.asyncio async def test_create_batch_prediction_job_field_headers_async(): - client = JobServiceAsyncClient( - credentials=credentials.AnonymousCredentials(), - ) + client = JobServiceAsyncClient(credentials=credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. @@ -4842,16 +4375,11 @@ async def test_create_batch_prediction_job_field_headers_async(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "parent=parent/value", - ) in kw["metadata"] + assert ("x-goog-request-params", "parent=parent/value",) in kw["metadata"] def test_create_batch_prediction_job_flattened(): - client = JobServiceClient( - credentials=credentials.AnonymousCredentials(), - ) + client = JobServiceClient(credentials=credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -4884,9 +4412,7 @@ def test_create_batch_prediction_job_flattened(): def test_create_batch_prediction_job_flattened_error(): - client = JobServiceClient( - credentials=credentials.AnonymousCredentials(), - ) + client = JobServiceClient(credentials=credentials.AnonymousCredentials(),) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -4902,9 +4428,7 @@ def test_create_batch_prediction_job_flattened_error(): @pytest.mark.asyncio async def test_create_batch_prediction_job_flattened_async(): - client = JobServiceAsyncClient( - credentials=credentials.AnonymousCredentials(), - ) + client = JobServiceAsyncClient(credentials=credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object( @@ -4941,9 +4465,7 @@ async def test_create_batch_prediction_job_flattened_async(): @pytest.mark.asyncio async def test_create_batch_prediction_job_flattened_error_async(): - client = JobServiceAsyncClient( - credentials=credentials.AnonymousCredentials(), - ) + client = JobServiceAsyncClient(credentials=credentials.AnonymousCredentials(),) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -4961,8 +4483,7 @@ def test_get_batch_prediction_job( transport: str = "grpc", request_type=job_service.GetBatchPredictionJobRequest ): client = JobServiceClient( - credentials=credentials.AnonymousCredentials(), - transport=transport, + credentials=credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -5015,8 +4536,7 @@ async def test_get_batch_prediction_job_async( request_type=job_service.GetBatchPredictionJobRequest, ): client = JobServiceAsyncClient( - credentials=credentials.AnonymousCredentials(), - transport=transport, + credentials=credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -5066,9 +4586,7 @@ async def test_get_batch_prediction_job_async_from_dict(): def test_get_batch_prediction_job_field_headers(): - client = JobServiceClient( - credentials=credentials.AnonymousCredentials(), - ) + client = JobServiceClient(credentials=credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. @@ -5090,17 +4608,12 @@ def test_get_batch_prediction_job_field_headers(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "name=name/value", - ) in kw["metadata"] + assert ("x-goog-request-params", "name=name/value",) in kw["metadata"] @pytest.mark.asyncio async def test_get_batch_prediction_job_field_headers_async(): - client = JobServiceAsyncClient( - credentials=credentials.AnonymousCredentials(), - ) + client = JobServiceAsyncClient(credentials=credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. @@ -5124,16 +4637,11 @@ async def test_get_batch_prediction_job_field_headers_async(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "name=name/value", - ) in kw["metadata"] + assert ("x-goog-request-params", "name=name/value",) in kw["metadata"] def test_get_batch_prediction_job_flattened(): - client = JobServiceClient( - credentials=credentials.AnonymousCredentials(), - ) + client = JobServiceClient(credentials=credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -5144,9 +4652,7 @@ def test_get_batch_prediction_job_flattened(): # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - client.get_batch_prediction_job( - name="name_value", - ) + client.get_batch_prediction_job(name="name_value",) # Establish that the underlying call was made with the expected # request object values. 
@@ -5157,24 +4663,19 @@ def test_get_batch_prediction_job_flattened(): def test_get_batch_prediction_job_flattened_error(): - client = JobServiceClient( - credentials=credentials.AnonymousCredentials(), - ) + client = JobServiceClient(credentials=credentials.AnonymousCredentials(),) # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): client.get_batch_prediction_job( - job_service.GetBatchPredictionJobRequest(), - name="name_value", + job_service.GetBatchPredictionJobRequest(), name="name_value", ) @pytest.mark.asyncio async def test_get_batch_prediction_job_flattened_async(): - client = JobServiceAsyncClient( - credentials=credentials.AnonymousCredentials(), - ) + client = JobServiceAsyncClient(credentials=credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -5188,9 +4689,7 @@ async def test_get_batch_prediction_job_flattened_async(): ) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - response = await client.get_batch_prediction_job( - name="name_value", - ) + response = await client.get_batch_prediction_job(name="name_value",) # Establish that the underlying call was made with the expected # request object values. @@ -5202,16 +4701,13 @@ async def test_get_batch_prediction_job_flattened_async(): @pytest.mark.asyncio async def test_get_batch_prediction_job_flattened_error_async(): - client = JobServiceAsyncClient( - credentials=credentials.AnonymousCredentials(), - ) + client = JobServiceAsyncClient(credentials=credentials.AnonymousCredentials(),) # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): await client.get_batch_prediction_job( - job_service.GetBatchPredictionJobRequest(), - name="name_value", + job_service.GetBatchPredictionJobRequest(), name="name_value", ) @@ -5219,8 +4715,7 @@ def test_list_batch_prediction_jobs( transport: str = "grpc", request_type=job_service.ListBatchPredictionJobsRequest ): client = JobServiceClient( - credentials=credentials.AnonymousCredentials(), - transport=transport, + credentials=credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -5261,8 +4756,7 @@ async def test_list_batch_prediction_jobs_async( request_type=job_service.ListBatchPredictionJobsRequest, ): client = JobServiceAsyncClient( - credentials=credentials.AnonymousCredentials(), - transport=transport, + credentials=credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -5300,9 +4794,7 @@ async def test_list_batch_prediction_jobs_async_from_dict(): def test_list_batch_prediction_jobs_field_headers(): - client = JobServiceClient( - credentials=credentials.AnonymousCredentials(), - ) + client = JobServiceClient(credentials=credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. @@ -5324,17 +4816,12 @@ def test_list_batch_prediction_jobs_field_headers(): # Establish that the field header was sent. 
_, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "parent=parent/value", - ) in kw["metadata"] + assert ("x-goog-request-params", "parent=parent/value",) in kw["metadata"] @pytest.mark.asyncio async def test_list_batch_prediction_jobs_field_headers_async(): - client = JobServiceAsyncClient( - credentials=credentials.AnonymousCredentials(), - ) + client = JobServiceAsyncClient(credentials=credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. @@ -5358,16 +4845,11 @@ async def test_list_batch_prediction_jobs_field_headers_async(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "parent=parent/value", - ) in kw["metadata"] + assert ("x-goog-request-params", "parent=parent/value",) in kw["metadata"] def test_list_batch_prediction_jobs_flattened(): - client = JobServiceClient( - credentials=credentials.AnonymousCredentials(), - ) + client = JobServiceClient(credentials=credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -5378,9 +4860,7 @@ def test_list_batch_prediction_jobs_flattened(): # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - client.list_batch_prediction_jobs( - parent="parent_value", - ) + client.list_batch_prediction_jobs(parent="parent_value",) # Establish that the underlying call was made with the expected # request object values. @@ -5391,24 +4871,19 @@ def test_list_batch_prediction_jobs_flattened(): def test_list_batch_prediction_jobs_flattened_error(): - client = JobServiceClient( - credentials=credentials.AnonymousCredentials(), - ) + client = JobServiceClient(credentials=credentials.AnonymousCredentials(),) # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): client.list_batch_prediction_jobs( - job_service.ListBatchPredictionJobsRequest(), - parent="parent_value", + job_service.ListBatchPredictionJobsRequest(), parent="parent_value", ) @pytest.mark.asyncio async def test_list_batch_prediction_jobs_flattened_async(): - client = JobServiceAsyncClient( - credentials=credentials.AnonymousCredentials(), - ) + client = JobServiceAsyncClient(credentials=credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -5422,9 +4897,7 @@ async def test_list_batch_prediction_jobs_flattened_async(): ) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - response = await client.list_batch_prediction_jobs( - parent="parent_value", - ) + response = await client.list_batch_prediction_jobs(parent="parent_value",) # Establish that the underlying call was made with the expected # request object values. @@ -5436,23 +4909,18 @@ async def test_list_batch_prediction_jobs_flattened_async(): @pytest.mark.asyncio async def test_list_batch_prediction_jobs_flattened_error_async(): - client = JobServiceAsyncClient( - credentials=credentials.AnonymousCredentials(), - ) + client = JobServiceAsyncClient(credentials=credentials.AnonymousCredentials(),) # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): await client.list_batch_prediction_jobs( - job_service.ListBatchPredictionJobsRequest(), - parent="parent_value", + job_service.ListBatchPredictionJobsRequest(), parent="parent_value", ) def test_list_batch_prediction_jobs_pager(): - client = JobServiceClient( - credentials=credentials.AnonymousCredentials, - ) + client = JobServiceClient(credentials=credentials.AnonymousCredentials,) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -5469,13 +4937,10 @@ def test_list_batch_prediction_jobs_pager(): next_page_token="abc", ), job_service.ListBatchPredictionJobsResponse( - batch_prediction_jobs=[], - next_page_token="def", + batch_prediction_jobs=[], next_page_token="def", ), job_service.ListBatchPredictionJobsResponse( - batch_prediction_jobs=[ - batch_prediction_job.BatchPredictionJob(), - ], + batch_prediction_jobs=[batch_prediction_job.BatchPredictionJob(),], next_page_token="ghi", ), job_service.ListBatchPredictionJobsResponse( @@ -5503,9 +4968,7 @@ def test_list_batch_prediction_jobs_pager(): def test_list_batch_prediction_jobs_pages(): - client = JobServiceClient( - credentials=credentials.AnonymousCredentials, - ) + client = JobServiceClient(credentials=credentials.AnonymousCredentials,) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -5522,13 +4985,10 @@ def test_list_batch_prediction_jobs_pages(): next_page_token="abc", ), job_service.ListBatchPredictionJobsResponse( - batch_prediction_jobs=[], - next_page_token="def", + batch_prediction_jobs=[], next_page_token="def", ), job_service.ListBatchPredictionJobsResponse( - batch_prediction_jobs=[ - batch_prediction_job.BatchPredictionJob(), - ], + batch_prediction_jobs=[batch_prediction_job.BatchPredictionJob(),], next_page_token="ghi", ), job_service.ListBatchPredictionJobsResponse( @@ -5546,9 +5006,7 @@ def test_list_batch_prediction_jobs_pages(): @pytest.mark.asyncio async def test_list_batch_prediction_jobs_async_pager(): - client = JobServiceAsyncClient( - credentials=credentials.AnonymousCredentials, - ) + client = JobServiceAsyncClient(credentials=credentials.AnonymousCredentials,) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -5567,13 +5025,10 @@ async def test_list_batch_prediction_jobs_async_pager(): next_page_token="abc", ), job_service.ListBatchPredictionJobsResponse( - batch_prediction_jobs=[], - next_page_token="def", + batch_prediction_jobs=[], next_page_token="def", ), job_service.ListBatchPredictionJobsResponse( - batch_prediction_jobs=[ - batch_prediction_job.BatchPredictionJob(), - ], + batch_prediction_jobs=[batch_prediction_job.BatchPredictionJob(),], next_page_token="ghi", ), job_service.ListBatchPredictionJobsResponse( @@ -5584,9 +5039,7 @@ async def test_list_batch_prediction_jobs_async_pager(): ), RuntimeError, ) - async_pager = await client.list_batch_prediction_jobs( - request={}, - ) + async_pager = await client.list_batch_prediction_jobs(request={},) assert async_pager.next_page_token == "abc" responses = [] async for response in async_pager: @@ -5600,9 +5053,7 @@ async def test_list_batch_prediction_jobs_async_pager(): @pytest.mark.asyncio async def test_list_batch_prediction_jobs_async_pages(): - client = JobServiceAsyncClient( - credentials=credentials.AnonymousCredentials, - ) + client = JobServiceAsyncClient(credentials=credentials.AnonymousCredentials,) # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object( @@ -5621,13 +5072,10 @@ async def test_list_batch_prediction_jobs_async_pages(): next_page_token="abc", ), job_service.ListBatchPredictionJobsResponse( - batch_prediction_jobs=[], - next_page_token="def", + batch_prediction_jobs=[], next_page_token="def", ), job_service.ListBatchPredictionJobsResponse( - batch_prediction_jobs=[ - batch_prediction_job.BatchPredictionJob(), - ], + batch_prediction_jobs=[batch_prediction_job.BatchPredictionJob(),], next_page_token="ghi", ), job_service.ListBatchPredictionJobsResponse( @@ -5649,8 +5097,7 @@ def test_delete_batch_prediction_job( transport: str = "grpc", request_type=job_service.DeleteBatchPredictionJobRequest ): client = JobServiceClient( - credentials=credentials.AnonymousCredentials(), - transport=transport, + credentials=credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -5686,8 +5133,7 @@ async def test_delete_batch_prediction_job_async( request_type=job_service.DeleteBatchPredictionJobRequest, ): client = JobServiceAsyncClient( - credentials=credentials.AnonymousCredentials(), - transport=transport, + credentials=credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -5721,9 +5167,7 @@ async def test_delete_batch_prediction_job_async_from_dict(): def test_delete_batch_prediction_job_field_headers(): - client = JobServiceClient( - credentials=credentials.AnonymousCredentials(), - ) + client = JobServiceClient(credentials=credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. @@ -5745,17 +5189,12 @@ def test_delete_batch_prediction_job_field_headers(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "name=name/value", - ) in kw["metadata"] + assert ("x-goog-request-params", "name=name/value",) in kw["metadata"] @pytest.mark.asyncio async def test_delete_batch_prediction_job_field_headers_async(): - client = JobServiceAsyncClient( - credentials=credentials.AnonymousCredentials(), - ) + client = JobServiceAsyncClient(credentials=credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. @@ -5779,16 +5218,11 @@ async def test_delete_batch_prediction_job_field_headers_async(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "name=name/value", - ) in kw["metadata"] + assert ("x-goog-request-params", "name=name/value",) in kw["metadata"] def test_delete_batch_prediction_job_flattened(): - client = JobServiceClient( - credentials=credentials.AnonymousCredentials(), - ) + client = JobServiceClient(credentials=credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -5799,9 +5233,7 @@ def test_delete_batch_prediction_job_flattened(): # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - client.delete_batch_prediction_job( - name="name_value", - ) + client.delete_batch_prediction_job(name="name_value",) # Establish that the underlying call was made with the expected # request object values. 
@@ -5812,24 +5244,19 @@ def test_delete_batch_prediction_job_flattened(): def test_delete_batch_prediction_job_flattened_error(): - client = JobServiceClient( - credentials=credentials.AnonymousCredentials(), - ) + client = JobServiceClient(credentials=credentials.AnonymousCredentials(),) # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): client.delete_batch_prediction_job( - job_service.DeleteBatchPredictionJobRequest(), - name="name_value", + job_service.DeleteBatchPredictionJobRequest(), name="name_value", ) @pytest.mark.asyncio async def test_delete_batch_prediction_job_flattened_async(): - client = JobServiceAsyncClient( - credentials=credentials.AnonymousCredentials(), - ) + client = JobServiceAsyncClient(credentials=credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -5843,9 +5270,7 @@ async def test_delete_batch_prediction_job_flattened_async(): ) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - response = await client.delete_batch_prediction_job( - name="name_value", - ) + response = await client.delete_batch_prediction_job(name="name_value",) # Establish that the underlying call was made with the expected # request object values. @@ -5857,16 +5282,13 @@ async def test_delete_batch_prediction_job_flattened_async(): @pytest.mark.asyncio async def test_delete_batch_prediction_job_flattened_error_async(): - client = JobServiceAsyncClient( - credentials=credentials.AnonymousCredentials(), - ) + client = JobServiceAsyncClient(credentials=credentials.AnonymousCredentials(),) # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): await client.delete_batch_prediction_job( - job_service.DeleteBatchPredictionJobRequest(), - name="name_value", + job_service.DeleteBatchPredictionJobRequest(), name="name_value", ) @@ -5874,8 +5296,7 @@ def test_cancel_batch_prediction_job( transport: str = "grpc", request_type=job_service.CancelBatchPredictionJobRequest ): client = JobServiceClient( - credentials=credentials.AnonymousCredentials(), - transport=transport, + credentials=credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -5911,8 +5332,7 @@ async def test_cancel_batch_prediction_job_async( request_type=job_service.CancelBatchPredictionJobRequest, ): client = JobServiceAsyncClient( - credentials=credentials.AnonymousCredentials(), - transport=transport, + credentials=credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -5944,9 +5364,7 @@ async def test_cancel_batch_prediction_job_async_from_dict(): def test_cancel_batch_prediction_job_field_headers(): - client = JobServiceClient( - credentials=credentials.AnonymousCredentials(), - ) + client = JobServiceClient(credentials=credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. @@ -5968,17 +5386,12 @@ def test_cancel_batch_prediction_job_field_headers(): # Establish that the field header was sent. 
_, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "name=name/value", - ) in kw["metadata"] + assert ("x-goog-request-params", "name=name/value",) in kw["metadata"] @pytest.mark.asyncio async def test_cancel_batch_prediction_job_field_headers_async(): - client = JobServiceAsyncClient( - credentials=credentials.AnonymousCredentials(), - ) + client = JobServiceAsyncClient(credentials=credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. @@ -6000,16 +5413,11 @@ async def test_cancel_batch_prediction_job_field_headers_async(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "name=name/value", - ) in kw["metadata"] + assert ("x-goog-request-params", "name=name/value",) in kw["metadata"] def test_cancel_batch_prediction_job_flattened(): - client = JobServiceClient( - credentials=credentials.AnonymousCredentials(), - ) + client = JobServiceClient(credentials=credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -6020,9 +5428,7 @@ def test_cancel_batch_prediction_job_flattened(): # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - client.cancel_batch_prediction_job( - name="name_value", - ) + client.cancel_batch_prediction_job(name="name_value",) # Establish that the underlying call was made with the expected # request object values. @@ -6033,24 +5439,19 @@ def test_cancel_batch_prediction_job_flattened(): def test_cancel_batch_prediction_job_flattened_error(): - client = JobServiceClient( - credentials=credentials.AnonymousCredentials(), - ) + client = JobServiceClient(credentials=credentials.AnonymousCredentials(),) # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): client.cancel_batch_prediction_job( - job_service.CancelBatchPredictionJobRequest(), - name="name_value", + job_service.CancelBatchPredictionJobRequest(), name="name_value", ) @pytest.mark.asyncio async def test_cancel_batch_prediction_job_flattened_async(): - client = JobServiceAsyncClient( - credentials=credentials.AnonymousCredentials(), - ) + client = JobServiceAsyncClient(credentials=credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -6062,9 +5463,7 @@ async def test_cancel_batch_prediction_job_flattened_async(): call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - response = await client.cancel_batch_prediction_job( - name="name_value", - ) + response = await client.cancel_batch_prediction_job(name="name_value",) # Establish that the underlying call was made with the expected # request object values. @@ -6076,16 +5475,13 @@ async def test_cancel_batch_prediction_job_flattened_async(): @pytest.mark.asyncio async def test_cancel_batch_prediction_job_flattened_error_async(): - client = JobServiceAsyncClient( - credentials=credentials.AnonymousCredentials(), - ) + client = JobServiceAsyncClient(credentials=credentials.AnonymousCredentials(),) # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): await client.cancel_batch_prediction_job( - job_service.CancelBatchPredictionJobRequest(), - name="name_value", + job_service.CancelBatchPredictionJobRequest(), name="name_value", ) @@ -6096,8 +5492,7 @@ def test_credentials_transport_error(): ) with pytest.raises(ValueError): client = JobServiceClient( - credentials=credentials.AnonymousCredentials(), - transport=transport, + credentials=credentials.AnonymousCredentials(), transport=transport, ) # It is an error to provide a credentials file and a transport instance. @@ -6116,8 +5511,7 @@ def test_credentials_transport_error(): ) with pytest.raises(ValueError): client = JobServiceClient( - client_options={"scopes": ["1", "2"]}, - transport=transport, + client_options={"scopes": ["1", "2"]}, transport=transport, ) @@ -6159,13 +5553,8 @@ def test_transport_adc(transport_class): def test_transport_grpc_default(): # A client should use the gRPC transport by default. - client = JobServiceClient( - credentials=credentials.AnonymousCredentials(), - ) - assert isinstance( - client.transport, - transports.JobServiceGrpcTransport, - ) + client = JobServiceClient(credentials=credentials.AnonymousCredentials(),) + assert isinstance(client.transport, transports.JobServiceGrpcTransport,) def test_job_service_base_transport_error(): @@ -6231,8 +5620,7 @@ def test_job_service_base_transport_with_credentials_file(): Transport.return_value = None load_creds.return_value = (credentials.AnonymousCredentials(), None) transport = transports.JobServiceTransport( - credentials_file="credentials.json", - quota_project_id="octopus", + credentials_file="credentials.json", quota_project_id="octopus", ) load_creds.assert_called_once_with( "credentials.json", @@ -6302,8 +5690,7 @@ def test_job_service_grpc_transport_channel(): # Check that channel is used if provided. transport = transports.JobServiceGrpcTransport( - host="squid.clam.whelk", - channel=channel, + host="squid.clam.whelk", channel=channel, ) assert transport.grpc_channel == channel assert transport._host == "squid.clam.whelk:443" @@ -6315,8 +5702,7 @@ def test_job_service_grpc_asyncio_transport_channel(): # Check that channel is used if provided. transport = transports.JobServiceGrpcAsyncIOTransport( - host="squid.clam.whelk", - channel=channel, + host="squid.clam.whelk", channel=channel, ) assert transport.grpc_channel == channel assert transport._host == "squid.clam.whelk:443" @@ -6405,16 +5791,12 @@ def test_job_service_transport_channel_mtls_with_adc(transport_class): def test_job_service_grpc_lro_client(): client = JobServiceClient( - credentials=credentials.AnonymousCredentials(), - transport="grpc", + credentials=credentials.AnonymousCredentials(), transport="grpc", ) transport = client.transport # Ensure that we have a api-core operations client. - assert isinstance( - transport.operations_client, - operations_v1.OperationsClient, - ) + assert isinstance(transport.operations_client, operations_v1.OperationsClient,) # Ensure that subsequent calls to the property send the exact same object. assert transport.operations_client is transport.operations_client @@ -6422,16 +5804,12 @@ def test_job_service_grpc_lro_client(): def test_job_service_grpc_lro_async_client(): client = JobServiceAsyncClient( - credentials=credentials.AnonymousCredentials(), - transport="grpc_asyncio", + credentials=credentials.AnonymousCredentials(), transport="grpc_asyncio", ) transport = client.transport # Ensure that we have a api-core operations client. 
- assert isinstance( - transport.operations_client, - operations_v1.OperationsAsyncClient, - ) + assert isinstance(transport.operations_client, operations_v1.OperationsAsyncClient,) # Ensure that subsequent calls to the property send the exact same object. assert transport.operations_client is transport.operations_client @@ -6443,9 +5821,7 @@ def test_batch_prediction_job_path(): batch_prediction_job = "whelk" expected = "projects/{project}/locations/{location}/batchPredictionJobs/{batch_prediction_job}".format( - project=project, - location=location, - batch_prediction_job=batch_prediction_job, + project=project, location=location, batch_prediction_job=batch_prediction_job, ) actual = JobServiceClient.batch_prediction_job_path( project, location, batch_prediction_job @@ -6472,9 +5848,7 @@ def test_custom_job_path(): custom_job = "winkle" expected = "projects/{project}/locations/{location}/customJobs/{custom_job}".format( - project=project, - location=location, - custom_job=custom_job, + project=project, location=location, custom_job=custom_job, ) actual = JobServiceClient.custom_job_path(project, location, custom_job) assert expected == actual @@ -6499,9 +5873,7 @@ def test_data_labeling_job_path(): data_labeling_job = "whelk" expected = "projects/{project}/locations/{location}/dataLabelingJobs/{data_labeling_job}".format( - project=project, - location=location, - data_labeling_job=data_labeling_job, + project=project, location=location, data_labeling_job=data_labeling_job, ) actual = JobServiceClient.data_labeling_job_path( project, location, data_labeling_job @@ -6528,9 +5900,7 @@ def test_dataset_path(): dataset = "winkle" expected = "projects/{project}/locations/{location}/datasets/{dataset}".format( - project=project, - location=location, - dataset=dataset, + project=project, location=location, dataset=dataset, ) actual = JobServiceClient.dataset_path(project, location, dataset) assert expected == actual @@ -6584,9 +5954,7 @@ def test_model_path(): model = "winkle" expected = "projects/{project}/locations/{location}/models/{model}".format( - project=project, - location=location, - model=model, + project=project, location=location, model=model, ) actual = JobServiceClient.model_path(project, location, model) assert expected == actual @@ -6629,9 +5997,7 @@ def test_parse_common_billing_account_path(): def test_common_folder_path(): folder = "whelk" - expected = "folders/{folder}".format( - folder=folder, - ) + expected = "folders/{folder}".format(folder=folder,) actual = JobServiceClient.common_folder_path(folder) assert expected == actual @@ -6650,9 +6016,7 @@ def test_parse_common_folder_path(): def test_common_organization_path(): organization = "oyster" - expected = "organizations/{organization}".format( - organization=organization, - ) + expected = "organizations/{organization}".format(organization=organization,) actual = JobServiceClient.common_organization_path(organization) assert expected == actual @@ -6671,9 +6035,7 @@ def test_parse_common_organization_path(): def test_common_project_path(): project = "cuttlefish" - expected = "projects/{project}".format( - project=project, - ) + expected = "projects/{project}".format(project=project,) actual = JobServiceClient.common_project_path(project) assert expected == actual @@ -6694,8 +6056,7 @@ def test_common_location_path(): location = "nautilus" expected = "projects/{project}/locations/{location}".format( - project=project, - location=location, + project=project, location=location, ) actual = 
JobServiceClient.common_location_path(project, location) assert expected == actual @@ -6720,8 +6081,7 @@ def test_client_withDEFAULT_CLIENT_INFO(): transports.JobServiceTransport, "_prep_wrapped_messages" ) as prep: client = JobServiceClient( - credentials=credentials.AnonymousCredentials(), - client_info=client_info, + credentials=credentials.AnonymousCredentials(), client_info=client_info, ) prep.assert_called_once_with(client_info) @@ -6730,7 +6090,6 @@ def test_client_withDEFAULT_CLIENT_INFO(): ) as prep: transport_class = JobServiceClient.get_transport_class() transport = transport_class( - credentials=credentials.AnonymousCredentials(), - client_info=client_info, + credentials=credentials.AnonymousCredentials(), client_info=client_info, ) prep.assert_called_once_with(client_info) diff --git a/tests/unit/gapic/aiplatform_v1beta1/test_migration_service.py b/tests/unit/gapic/aiplatform_v1beta1/test_migration_service.py index 23f17a54b6..85e6a2d362 100644 --- a/tests/unit/gapic/aiplatform_v1beta1/test_migration_service.py +++ b/tests/unit/gapic/aiplatform_v1beta1/test_migration_service.py @@ -393,9 +393,7 @@ def test_migration_service_client_client_options_scopes( client_class, transport_class, transport_name ): # Check the case scopes are provided. - options = client_options.ClientOptions( - scopes=["1", "2"], - ) + options = client_options.ClientOptions(scopes=["1", "2"],) with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None client = client_class(client_options=options) @@ -464,8 +462,7 @@ def test_search_migratable_resources( request_type=migration_service.SearchMigratableResourcesRequest, ): client = MigrationServiceClient( - credentials=credentials.AnonymousCredentials(), - transport=transport, + credentials=credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -506,8 +503,7 @@ async def test_search_migratable_resources_async( request_type=migration_service.SearchMigratableResourcesRequest, ): client = MigrationServiceAsyncClient( - credentials=credentials.AnonymousCredentials(), - transport=transport, + credentials=credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -545,9 +541,7 @@ async def test_search_migratable_resources_async_from_dict(): def test_search_migratable_resources_field_headers(): - client = MigrationServiceClient( - credentials=credentials.AnonymousCredentials(), - ) + client = MigrationServiceClient(credentials=credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. @@ -569,10 +563,7 @@ def test_search_migratable_resources_field_headers(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "parent=parent/value", - ) in kw["metadata"] + assert ("x-goog-request-params", "parent=parent/value",) in kw["metadata"] @pytest.mark.asyncio @@ -603,16 +594,11 @@ async def test_search_migratable_resources_field_headers_async(): # Establish that the field header was sent. 
_, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "parent=parent/value", - ) in kw["metadata"] + assert ("x-goog-request-params", "parent=parent/value",) in kw["metadata"] def test_search_migratable_resources_flattened(): - client = MigrationServiceClient( - credentials=credentials.AnonymousCredentials(), - ) + client = MigrationServiceClient(credentials=credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -623,9 +609,7 @@ def test_search_migratable_resources_flattened(): # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - client.search_migratable_resources( - parent="parent_value", - ) + client.search_migratable_resources(parent="parent_value",) # Establish that the underlying call was made with the expected # request object values. @@ -636,16 +620,13 @@ def test_search_migratable_resources_flattened(): def test_search_migratable_resources_flattened_error(): - client = MigrationServiceClient( - credentials=credentials.AnonymousCredentials(), - ) + client = MigrationServiceClient(credentials=credentials.AnonymousCredentials(),) # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): client.search_migratable_resources( - migration_service.SearchMigratableResourcesRequest(), - parent="parent_value", + migration_service.SearchMigratableResourcesRequest(), parent="parent_value", ) @@ -667,9 +648,7 @@ async def test_search_migratable_resources_flattened_async(): ) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - response = await client.search_migratable_resources( - parent="parent_value", - ) + response = await client.search_migratable_resources(parent="parent_value",) # Establish that the underlying call was made with the expected # request object values. @@ -689,15 +668,12 @@ async def test_search_migratable_resources_flattened_error_async(): # fields is an error. with pytest.raises(ValueError): await client.search_migratable_resources( - migration_service.SearchMigratableResourcesRequest(), - parent="parent_value", + migration_service.SearchMigratableResourcesRequest(), parent="parent_value", ) def test_search_migratable_resources_pager(): - client = MigrationServiceClient( - credentials=credentials.AnonymousCredentials, - ) + client = MigrationServiceClient(credentials=credentials.AnonymousCredentials,) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -714,13 +690,10 @@ def test_search_migratable_resources_pager(): next_page_token="abc", ), migration_service.SearchMigratableResourcesResponse( - migratable_resources=[], - next_page_token="def", + migratable_resources=[], next_page_token="def", ), migration_service.SearchMigratableResourcesResponse( - migratable_resources=[ - migratable_resource.MigratableResource(), - ], + migratable_resources=[migratable_resource.MigratableResource(),], next_page_token="ghi", ), migration_service.SearchMigratableResourcesResponse( @@ -748,9 +721,7 @@ def test_search_migratable_resources_pager(): def test_search_migratable_resources_pages(): - client = MigrationServiceClient( - credentials=credentials.AnonymousCredentials, - ) + client = MigrationServiceClient(credentials=credentials.AnonymousCredentials,) # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object( @@ -767,13 +738,10 @@ def test_search_migratable_resources_pages(): next_page_token="abc", ), migration_service.SearchMigratableResourcesResponse( - migratable_resources=[], - next_page_token="def", + migratable_resources=[], next_page_token="def", ), migration_service.SearchMigratableResourcesResponse( - migratable_resources=[ - migratable_resource.MigratableResource(), - ], + migratable_resources=[migratable_resource.MigratableResource(),], next_page_token="ghi", ), migration_service.SearchMigratableResourcesResponse( @@ -791,9 +759,7 @@ def test_search_migratable_resources_pages(): @pytest.mark.asyncio async def test_search_migratable_resources_async_pager(): - client = MigrationServiceAsyncClient( - credentials=credentials.AnonymousCredentials, - ) + client = MigrationServiceAsyncClient(credentials=credentials.AnonymousCredentials,) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -812,13 +778,10 @@ async def test_search_migratable_resources_async_pager(): next_page_token="abc", ), migration_service.SearchMigratableResourcesResponse( - migratable_resources=[], - next_page_token="def", + migratable_resources=[], next_page_token="def", ), migration_service.SearchMigratableResourcesResponse( - migratable_resources=[ - migratable_resource.MigratableResource(), - ], + migratable_resources=[migratable_resource.MigratableResource(),], next_page_token="ghi", ), migration_service.SearchMigratableResourcesResponse( @@ -829,9 +792,7 @@ async def test_search_migratable_resources_async_pager(): ), RuntimeError, ) - async_pager = await client.search_migratable_resources( - request={}, - ) + async_pager = await client.search_migratable_resources(request={},) assert async_pager.next_page_token == "abc" responses = [] async for response in async_pager: @@ -845,9 +806,7 @@ async def test_search_migratable_resources_async_pager(): @pytest.mark.asyncio async def test_search_migratable_resources_async_pages(): - client = MigrationServiceAsyncClient( - credentials=credentials.AnonymousCredentials, - ) + client = MigrationServiceAsyncClient(credentials=credentials.AnonymousCredentials,) # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object( @@ -866,13 +825,10 @@ async def test_search_migratable_resources_async_pages(): next_page_token="abc", ), migration_service.SearchMigratableResourcesResponse( - migratable_resources=[], - next_page_token="def", + migratable_resources=[], next_page_token="def", ), migration_service.SearchMigratableResourcesResponse( - migratable_resources=[ - migratable_resource.MigratableResource(), - ], + migratable_resources=[migratable_resource.MigratableResource(),], next_page_token="ghi", ), migration_service.SearchMigratableResourcesResponse( @@ -894,8 +850,7 @@ def test_batch_migrate_resources( transport: str = "grpc", request_type=migration_service.BatchMigrateResourcesRequest ): client = MigrationServiceClient( - credentials=credentials.AnonymousCredentials(), - transport=transport, + credentials=credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -931,8 +886,7 @@ async def test_batch_migrate_resources_async( request_type=migration_service.BatchMigrateResourcesRequest, ): client = MigrationServiceAsyncClient( - credentials=credentials.AnonymousCredentials(), - transport=transport, + credentials=credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -966,9 +920,7 @@ async def test_batch_migrate_resources_async_from_dict(): def test_batch_migrate_resources_field_headers(): - client = MigrationServiceClient( - credentials=credentials.AnonymousCredentials(), - ) + client = MigrationServiceClient(credentials=credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. @@ -990,10 +942,7 @@ def test_batch_migrate_resources_field_headers(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "parent=parent/value", - ) in kw["metadata"] + assert ("x-goog-request-params", "parent=parent/value",) in kw["metadata"] @pytest.mark.asyncio @@ -1024,16 +973,11 @@ async def test_batch_migrate_resources_field_headers_async(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "parent=parent/value", - ) in kw["metadata"] + assert ("x-goog-request-params", "parent=parent/value",) in kw["metadata"] def test_batch_migrate_resources_flattened(): - client = MigrationServiceClient( - credentials=credentials.AnonymousCredentials(), - ) + client = MigrationServiceClient(credentials=credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -1072,9 +1016,7 @@ def test_batch_migrate_resources_flattened(): def test_batch_migrate_resources_flattened_error(): - client = MigrationServiceClient( - credentials=credentials.AnonymousCredentials(), - ) + client = MigrationServiceClient(credentials=credentials.AnonymousCredentials(),) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -1166,8 +1108,7 @@ def test_credentials_transport_error(): ) with pytest.raises(ValueError): client = MigrationServiceClient( - credentials=credentials.AnonymousCredentials(), - transport=transport, + credentials=credentials.AnonymousCredentials(), transport=transport, ) # It is an error to provide a credentials file and a transport instance. 
@@ -1186,8 +1127,7 @@ def test_credentials_transport_error(): ) with pytest.raises(ValueError): client = MigrationServiceClient( - client_options={"scopes": ["1", "2"]}, - transport=transport, + client_options={"scopes": ["1", "2"]}, transport=transport, ) @@ -1232,13 +1172,8 @@ def test_transport_adc(transport_class): def test_transport_grpc_default(): # A client should use the gRPC transport by default. - client = MigrationServiceClient( - credentials=credentials.AnonymousCredentials(), - ) - assert isinstance( - client.transport, - transports.MigrationServiceGrpcTransport, - ) + client = MigrationServiceClient(credentials=credentials.AnonymousCredentials(),) + assert isinstance(client.transport, transports.MigrationServiceGrpcTransport,) def test_migration_service_base_transport_error(): @@ -1286,8 +1221,7 @@ def test_migration_service_base_transport_with_credentials_file(): Transport.return_value = None load_creds.return_value = (credentials.AnonymousCredentials(), None) transport = transports.MigrationServiceTransport( - credentials_file="credentials.json", - quota_project_id="octopus", + credentials_file="credentials.json", quota_project_id="octopus", ) load_creds.assert_called_once_with( "credentials.json", @@ -1357,8 +1291,7 @@ def test_migration_service_grpc_transport_channel(): # Check that channel is used if provided. transport = transports.MigrationServiceGrpcTransport( - host="squid.clam.whelk", - channel=channel, + host="squid.clam.whelk", channel=channel, ) assert transport.grpc_channel == channel assert transport._host == "squid.clam.whelk:443" @@ -1370,8 +1303,7 @@ def test_migration_service_grpc_asyncio_transport_channel(): # Check that channel is used if provided. transport = transports.MigrationServiceGrpcAsyncIOTransport( - host="squid.clam.whelk", - channel=channel, + host="squid.clam.whelk", channel=channel, ) assert transport.grpc_channel == channel assert transport._host == "squid.clam.whelk:443" @@ -1468,16 +1400,12 @@ def test_migration_service_transport_channel_mtls_with_adc(transport_class): def test_migration_service_grpc_lro_client(): client = MigrationServiceClient( - credentials=credentials.AnonymousCredentials(), - transport="grpc", + credentials=credentials.AnonymousCredentials(), transport="grpc", ) transport = client.transport # Ensure that we have a api-core operations client. - assert isinstance( - transport.operations_client, - operations_v1.OperationsClient, - ) + assert isinstance(transport.operations_client, operations_v1.OperationsClient,) # Ensure that subsequent calls to the property send the exact same object. assert transport.operations_client is transport.operations_client @@ -1485,16 +1413,12 @@ def test_migration_service_grpc_lro_client(): def test_migration_service_grpc_lro_async_client(): client = MigrationServiceAsyncClient( - credentials=credentials.AnonymousCredentials(), - transport="grpc_asyncio", + credentials=credentials.AnonymousCredentials(), transport="grpc_asyncio", ) transport = client.transport # Ensure that we have a api-core operations client. - assert isinstance( - transport.operations_client, - operations_v1.OperationsAsyncClient, - ) + assert isinstance(transport.operations_client, operations_v1.OperationsAsyncClient,) # Ensure that subsequent calls to the property send the exact same object. 
assert transport.operations_client is transport.operations_client @@ -1506,9 +1430,7 @@ def test_annotated_dataset_path(): annotated_dataset = "whelk" expected = "projects/{project}/datasets/{dataset}/annotatedDatasets/{annotated_dataset}".format( - project=project, - dataset=dataset, - annotated_dataset=annotated_dataset, + project=project, dataset=dataset, annotated_dataset=annotated_dataset, ) actual = MigrationServiceClient.annotated_dataset_path( project, dataset, annotated_dataset @@ -1535,9 +1457,7 @@ def test_dataset_path(): dataset = "winkle" expected = "projects/{project}/locations/{location}/datasets/{dataset}".format( - project=project, - location=location, - dataset=dataset, + project=project, location=location, dataset=dataset, ) actual = MigrationServiceClient.dataset_path(project, location, dataset) assert expected == actual @@ -1561,8 +1481,7 @@ def test_dataset_path(): dataset = "clam" expected = "projects/{project}/datasets/{dataset}".format( - project=project, - dataset=dataset, + project=project, dataset=dataset, ) actual = MigrationServiceClient.dataset_path(project, dataset) assert expected == actual @@ -1586,9 +1505,7 @@ def test_dataset_path(): dataset = "cuttlefish" expected = "projects/{project}/locations/{location}/datasets/{dataset}".format( - project=project, - location=location, - dataset=dataset, + project=project, location=location, dataset=dataset, ) actual = MigrationServiceClient.dataset_path(project, location, dataset) assert expected == actual @@ -1613,9 +1530,7 @@ def test_model_path(): model = "squid" expected = "projects/{project}/locations/{location}/models/{model}".format( - project=project, - location=location, - model=model, + project=project, location=location, model=model, ) actual = MigrationServiceClient.model_path(project, location, model) assert expected == actual @@ -1640,9 +1555,7 @@ def test_model_path(): model = "cuttlefish" expected = "projects/{project}/locations/{location}/models/{model}".format( - project=project, - location=location, - model=model, + project=project, location=location, model=model, ) actual = MigrationServiceClient.model_path(project, location, model) assert expected == actual @@ -1667,9 +1580,7 @@ def test_version_path(): version = "squid" expected = "projects/{project}/models/{model}/versions/{version}".format( - project=project, - model=model, - version=version, + project=project, model=model, version=version, ) actual = MigrationServiceClient.version_path(project, model, version) assert expected == actual @@ -1712,9 +1623,7 @@ def test_parse_common_billing_account_path(): def test_common_folder_path(): folder = "cuttlefish" - expected = "folders/{folder}".format( - folder=folder, - ) + expected = "folders/{folder}".format(folder=folder,) actual = MigrationServiceClient.common_folder_path(folder) assert expected == actual @@ -1733,9 +1642,7 @@ def test_parse_common_folder_path(): def test_common_organization_path(): organization = "winkle" - expected = "organizations/{organization}".format( - organization=organization, - ) + expected = "organizations/{organization}".format(organization=organization,) actual = MigrationServiceClient.common_organization_path(organization) assert expected == actual @@ -1754,9 +1661,7 @@ def test_parse_common_organization_path(): def test_common_project_path(): project = "scallop" - expected = "projects/{project}".format( - project=project, - ) + expected = "projects/{project}".format(project=project,) actual = MigrationServiceClient.common_project_path(project) assert expected == 
actual @@ -1777,8 +1682,7 @@ def test_common_location_path(): location = "clam" expected = "projects/{project}/locations/{location}".format( - project=project, - location=location, + project=project, location=location, ) actual = MigrationServiceClient.common_location_path(project, location) assert expected == actual @@ -1803,8 +1707,7 @@ def test_client_withDEFAULT_CLIENT_INFO(): transports.MigrationServiceTransport, "_prep_wrapped_messages" ) as prep: client = MigrationServiceClient( - credentials=credentials.AnonymousCredentials(), - client_info=client_info, + credentials=credentials.AnonymousCredentials(), client_info=client_info, ) prep.assert_called_once_with(client_info) @@ -1813,7 +1716,6 @@ def test_client_withDEFAULT_CLIENT_INFO(): ) as prep: transport_class = MigrationServiceClient.get_transport_class() transport = transport_class( - credentials=credentials.AnonymousCredentials(), - client_info=client_info, + credentials=credentials.AnonymousCredentials(), client_info=client_info, ) prep.assert_called_once_with(client_info) diff --git a/tests/unit/gapic/aiplatform_v1beta1/test_model_service.py b/tests/unit/gapic/aiplatform_v1beta1/test_model_service.py index 97b32a4e78..d05698a46a 100644 --- a/tests/unit/gapic/aiplatform_v1beta1/test_model_service.py +++ b/tests/unit/gapic/aiplatform_v1beta1/test_model_service.py @@ -383,9 +383,7 @@ def test_model_service_client_client_options_scopes( client_class, transport_class, transport_name ): # Check the case scopes are provided. - options = client_options.ClientOptions( - scopes=["1", "2"], - ) + options = client_options.ClientOptions(scopes=["1", "2"],) with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None client = client_class(client_options=options) @@ -451,8 +449,7 @@ def test_upload_model( transport: str = "grpc", request_type=model_service.UploadModelRequest ): client = ModelServiceClient( - credentials=credentials.AnonymousCredentials(), - transport=transport, + credentials=credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -485,8 +482,7 @@ async def test_upload_model_async( transport: str = "grpc_asyncio", request_type=model_service.UploadModelRequest ): client = ModelServiceAsyncClient( - credentials=credentials.AnonymousCredentials(), - transport=transport, + credentials=credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -518,9 +514,7 @@ async def test_upload_model_async_from_dict(): def test_upload_model_field_headers(): - client = ModelServiceClient( - credentials=credentials.AnonymousCredentials(), - ) + client = ModelServiceClient(credentials=credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. @@ -540,17 +534,12 @@ def test_upload_model_field_headers(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "parent=parent/value", - ) in kw["metadata"] + assert ("x-goog-request-params", "parent=parent/value",) in kw["metadata"] @pytest.mark.asyncio async def test_upload_model_field_headers_async(): - client = ModelServiceAsyncClient( - credentials=credentials.AnonymousCredentials(), - ) + client = ModelServiceAsyncClient(credentials=credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. 
Set these to a non-empty value. @@ -572,16 +561,11 @@ async def test_upload_model_field_headers_async(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "parent=parent/value", - ) in kw["metadata"] + assert ("x-goog-request-params", "parent=parent/value",) in kw["metadata"] def test_upload_model_flattened(): - client = ModelServiceClient( - credentials=credentials.AnonymousCredentials(), - ) + client = ModelServiceClient(credentials=credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.upload_model), "__call__") as call: @@ -591,8 +575,7 @@ def test_upload_model_flattened(): # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. client.upload_model( - parent="parent_value", - model=gca_model.Model(name="name_value"), + parent="parent_value", model=gca_model.Model(name="name_value"), ) # Establish that the underlying call was made with the expected @@ -606,9 +589,7 @@ def test_upload_model_flattened(): def test_upload_model_flattened_error(): - client = ModelServiceClient( - credentials=credentials.AnonymousCredentials(), - ) + client = ModelServiceClient(credentials=credentials.AnonymousCredentials(),) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -622,9 +603,7 @@ def test_upload_model_flattened_error(): @pytest.mark.asyncio async def test_upload_model_flattened_async(): - client = ModelServiceAsyncClient( - credentials=credentials.AnonymousCredentials(), - ) + client = ModelServiceAsyncClient(credentials=credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.upload_model), "__call__") as call: @@ -637,8 +616,7 @@ async def test_upload_model_flattened_async(): # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. response = await client.upload_model( - parent="parent_value", - model=gca_model.Model(name="name_value"), + parent="parent_value", model=gca_model.Model(name="name_value"), ) # Establish that the underlying call was made with the expected @@ -653,9 +631,7 @@ async def test_upload_model_flattened_async(): @pytest.mark.asyncio async def test_upload_model_flattened_error_async(): - client = ModelServiceAsyncClient( - credentials=credentials.AnonymousCredentials(), - ) + client = ModelServiceAsyncClient(credentials=credentials.AnonymousCredentials(),) # Attempting to call a method with both a request object and flattened # fields is an error. 
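The repeated x-goog-request-params assertions deserve a note: each field-header test unpacks the first recorded call on the mocked stub into (name, args, kwargs) and checks that the routing header tuple landed in kwargs["metadata"]. Reduced to its essentials with a plain mock standing in for the gRPC stub (send_request is a hypothetical helper, not part of the generated client):

from unittest import mock

def send_request(stub, parent):
    # Mimics what the generated client does before invoking the stub:
    # attach a routing header so the backend can route by resource parent.
    metadata = (("x-goog-request-params", "parent=%s" % parent),)
    stub(request={"parent": parent}, metadata=metadata)

stub = mock.Mock()
send_request(stub, "parent/value")

# Entries in mock_calls unpack to (name, args, kwargs), as in the tests above.
_, _, kw = stub.mock_calls[0]
assert ("x-goog-request-params", "parent=parent/value") in kw["metadata"]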
@@ -669,8 +645,7 @@ async def test_upload_model_flattened_error_async(): def test_get_model(transport: str = "grpc", request_type=model_service.GetModelRequest): client = ModelServiceClient( - credentials=credentials.AnonymousCredentials(), - transport=transport, + credentials=credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -743,8 +718,7 @@ async def test_get_model_async( transport: str = "grpc_asyncio", request_type=model_service.GetModelRequest ): client = ModelServiceAsyncClient( - credentials=credentials.AnonymousCredentials(), - transport=transport, + credentials=credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -819,9 +793,7 @@ async def test_get_model_async_from_dict(): def test_get_model_field_headers(): - client = ModelServiceClient( - credentials=credentials.AnonymousCredentials(), - ) + client = ModelServiceClient(credentials=credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. @@ -841,17 +813,12 @@ def test_get_model_field_headers(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "name=name/value", - ) in kw["metadata"] + assert ("x-goog-request-params", "name=name/value",) in kw["metadata"] @pytest.mark.asyncio async def test_get_model_field_headers_async(): - client = ModelServiceAsyncClient( - credentials=credentials.AnonymousCredentials(), - ) + client = ModelServiceAsyncClient(credentials=credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. @@ -871,16 +838,11 @@ async def test_get_model_field_headers_async(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "name=name/value", - ) in kw["metadata"] + assert ("x-goog-request-params", "name=name/value",) in kw["metadata"] def test_get_model_flattened(): - client = ModelServiceClient( - credentials=credentials.AnonymousCredentials(), - ) + client = ModelServiceClient(credentials=credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.get_model), "__call__") as call: @@ -889,9 +851,7 @@ def test_get_model_flattened(): # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - client.get_model( - name="name_value", - ) + client.get_model(name="name_value",) # Establish that the underlying call was made with the expected # request object values. @@ -902,24 +862,19 @@ def test_get_model_flattened(): def test_get_model_flattened_error(): - client = ModelServiceClient( - credentials=credentials.AnonymousCredentials(), - ) + client = ModelServiceClient(credentials=credentials.AnonymousCredentials(),) # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): client.get_model( - model_service.GetModelRequest(), - name="name_value", + model_service.GetModelRequest(), name="name_value", ) @pytest.mark.asyncio async def test_get_model_flattened_async(): - client = ModelServiceAsyncClient( - credentials=credentials.AnonymousCredentials(), - ) + client = ModelServiceAsyncClient(credentials=credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.get_model), "__call__") as call: @@ -929,9 +884,7 @@ async def test_get_model_flattened_async(): call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(model.Model()) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - response = await client.get_model( - name="name_value", - ) + response = await client.get_model(name="name_value",) # Establish that the underlying call was made with the expected # request object values. @@ -943,16 +896,13 @@ async def test_get_model_flattened_async(): @pytest.mark.asyncio async def test_get_model_flattened_error_async(): - client = ModelServiceAsyncClient( - credentials=credentials.AnonymousCredentials(), - ) + client = ModelServiceAsyncClient(credentials=credentials.AnonymousCredentials(),) # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): await client.get_model( - model_service.GetModelRequest(), - name="name_value", + model_service.GetModelRequest(), name="name_value", ) @@ -960,8 +910,7 @@ def test_list_models( transport: str = "grpc", request_type=model_service.ListModelsRequest ): client = ModelServiceClient( - credentials=credentials.AnonymousCredentials(), - transport=transport, + credentials=credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -999,8 +948,7 @@ async def test_list_models_async( transport: str = "grpc_asyncio", request_type=model_service.ListModelsRequest ): client = ModelServiceAsyncClient( - credentials=credentials.AnonymousCredentials(), - transport=transport, + credentials=credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -1011,9 +959,7 @@ async def test_list_models_async( with mock.patch.object(type(client.transport.list_models), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - model_service.ListModelsResponse( - next_page_token="next_page_token_value", - ) + model_service.ListModelsResponse(next_page_token="next_page_token_value",) ) response = await client.list_models(request) @@ -1036,9 +982,7 @@ async def test_list_models_async_from_dict(): def test_list_models_field_headers(): - client = ModelServiceClient( - credentials=credentials.AnonymousCredentials(), - ) + client = ModelServiceClient(credentials=credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. @@ -1058,17 +1002,12 @@ def test_list_models_field_headers(): # Establish that the field header was sent. 
_, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "parent=parent/value", - ) in kw["metadata"] + assert ("x-goog-request-params", "parent=parent/value",) in kw["metadata"] @pytest.mark.asyncio async def test_list_models_field_headers_async(): - client = ModelServiceAsyncClient( - credentials=credentials.AnonymousCredentials(), - ) + client = ModelServiceAsyncClient(credentials=credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. @@ -1090,16 +1029,11 @@ async def test_list_models_field_headers_async(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "parent=parent/value", - ) in kw["metadata"] + assert ("x-goog-request-params", "parent=parent/value",) in kw["metadata"] def test_list_models_flattened(): - client = ModelServiceClient( - credentials=credentials.AnonymousCredentials(), - ) + client = ModelServiceClient(credentials=credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.list_models), "__call__") as call: @@ -1108,9 +1042,7 @@ def test_list_models_flattened(): # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - client.list_models( - parent="parent_value", - ) + client.list_models(parent="parent_value",) # Establish that the underlying call was made with the expected # request object values. @@ -1121,24 +1053,19 @@ def test_list_models_flattened(): def test_list_models_flattened_error(): - client = ModelServiceClient( - credentials=credentials.AnonymousCredentials(), - ) + client = ModelServiceClient(credentials=credentials.AnonymousCredentials(),) # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): client.list_models( - model_service.ListModelsRequest(), - parent="parent_value", + model_service.ListModelsRequest(), parent="parent_value", ) @pytest.mark.asyncio async def test_list_models_flattened_async(): - client = ModelServiceAsyncClient( - credentials=credentials.AnonymousCredentials(), - ) + client = ModelServiceAsyncClient(credentials=credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.list_models), "__call__") as call: @@ -1150,9 +1077,7 @@ async def test_list_models_flattened_async(): ) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - response = await client.list_models( - parent="parent_value", - ) + response = await client.list_models(parent="parent_value",) # Establish that the underlying call was made with the expected # request object values. @@ -1164,52 +1089,32 @@ async def test_list_models_flattened_async(): @pytest.mark.asyncio async def test_list_models_flattened_error_async(): - client = ModelServiceAsyncClient( - credentials=credentials.AnonymousCredentials(), - ) + client = ModelServiceAsyncClient(credentials=credentials.AnonymousCredentials(),) # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): await client.list_models( - model_service.ListModelsRequest(), - parent="parent_value", + model_service.ListModelsRequest(), parent="parent_value", ) def test_list_models_pager(): - client = ModelServiceClient( - credentials=credentials.AnonymousCredentials, - ) + client = ModelServiceClient(credentials=credentials.AnonymousCredentials,) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.list_models), "__call__") as call: # Set the response to a series of pages. call.side_effect = ( model_service.ListModelsResponse( - models=[ - model.Model(), - model.Model(), - model.Model(), - ], + models=[model.Model(), model.Model(), model.Model(),], next_page_token="abc", ), + model_service.ListModelsResponse(models=[], next_page_token="def",), model_service.ListModelsResponse( - models=[], - next_page_token="def", - ), - model_service.ListModelsResponse( - models=[ - model.Model(), - ], - next_page_token="ghi", - ), - model_service.ListModelsResponse( - models=[ - model.Model(), - model.Model(), - ], + models=[model.Model(),], next_page_token="ghi", ), + model_service.ListModelsResponse(models=[model.Model(), model.Model(),],), RuntimeError, ) @@ -1227,38 +1132,21 @@ def test_list_models_pager(): def test_list_models_pages(): - client = ModelServiceClient( - credentials=credentials.AnonymousCredentials, - ) + client = ModelServiceClient(credentials=credentials.AnonymousCredentials,) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.list_models), "__call__") as call: # Set the response to a series of pages. call.side_effect = ( model_service.ListModelsResponse( - models=[ - model.Model(), - model.Model(), - model.Model(), - ], + models=[model.Model(), model.Model(), model.Model(),], next_page_token="abc", ), + model_service.ListModelsResponse(models=[], next_page_token="def",), model_service.ListModelsResponse( - models=[], - next_page_token="def", - ), - model_service.ListModelsResponse( - models=[ - model.Model(), - ], - next_page_token="ghi", - ), - model_service.ListModelsResponse( - models=[ - model.Model(), - model.Model(), - ], + models=[model.Model(),], next_page_token="ghi", ), + model_service.ListModelsResponse(models=[model.Model(), model.Model(),],), RuntimeError, ) pages = list(client.list_models(request={}).pages) @@ -1268,9 +1156,7 @@ def test_list_models_pages(): @pytest.mark.asyncio async def test_list_models_async_pager(): - client = ModelServiceAsyncClient( - credentials=credentials.AnonymousCredentials, - ) + client = ModelServiceAsyncClient(credentials=credentials.AnonymousCredentials,) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -1279,34 +1165,17 @@ async def test_list_models_async_pager(): # Set the response to a series of pages. 
call.side_effect = ( model_service.ListModelsResponse( - models=[ - model.Model(), - model.Model(), - model.Model(), - ], + models=[model.Model(), model.Model(), model.Model(),], next_page_token="abc", ), + model_service.ListModelsResponse(models=[], next_page_token="def",), model_service.ListModelsResponse( - models=[], - next_page_token="def", - ), - model_service.ListModelsResponse( - models=[ - model.Model(), - ], - next_page_token="ghi", - ), - model_service.ListModelsResponse( - models=[ - model.Model(), - model.Model(), - ], + models=[model.Model(),], next_page_token="ghi", ), + model_service.ListModelsResponse(models=[model.Model(), model.Model(),],), RuntimeError, ) - async_pager = await client.list_models( - request={}, - ) + async_pager = await client.list_models(request={},) assert async_pager.next_page_token == "abc" responses = [] async for response in async_pager: @@ -1318,9 +1187,7 @@ async def test_list_models_async_pager(): @pytest.mark.asyncio async def test_list_models_async_pages(): - client = ModelServiceAsyncClient( - credentials=credentials.AnonymousCredentials, - ) + client = ModelServiceAsyncClient(credentials=credentials.AnonymousCredentials,) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -1329,29 +1196,14 @@ async def test_list_models_async_pages(): # Set the response to a series of pages. call.side_effect = ( model_service.ListModelsResponse( - models=[ - model.Model(), - model.Model(), - model.Model(), - ], + models=[model.Model(), model.Model(), model.Model(),], next_page_token="abc", ), + model_service.ListModelsResponse(models=[], next_page_token="def",), model_service.ListModelsResponse( - models=[], - next_page_token="def", - ), - model_service.ListModelsResponse( - models=[ - model.Model(), - ], - next_page_token="ghi", - ), - model_service.ListModelsResponse( - models=[ - model.Model(), - model.Model(), - ], + models=[model.Model(),], next_page_token="ghi", ), + model_service.ListModelsResponse(models=[model.Model(), model.Model(),],), RuntimeError, ) pages = [] @@ -1365,8 +1217,7 @@ def test_update_model( transport: str = "grpc", request_type=model_service.UpdateModelRequest ): client = ModelServiceClient( - credentials=credentials.AnonymousCredentials(), - transport=transport, + credentials=credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -1439,8 +1290,7 @@ async def test_update_model_async( transport: str = "grpc_asyncio", request_type=model_service.UpdateModelRequest ): client = ModelServiceAsyncClient( - credentials=credentials.AnonymousCredentials(), - transport=transport, + credentials=credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -1515,9 +1365,7 @@ async def test_update_model_async_from_dict(): def test_update_model_field_headers(): - client = ModelServiceClient( - credentials=credentials.AnonymousCredentials(), - ) + client = ModelServiceClient(credentials=credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. @@ -1537,17 +1385,12 @@ def test_update_model_field_headers(): # Establish that the field header was sent. 
_, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "model.name=model.name/value", - ) in kw["metadata"] + assert ("x-goog-request-params", "model.name=model.name/value",) in kw["metadata"] @pytest.mark.asyncio async def test_update_model_field_headers_async(): - client = ModelServiceAsyncClient( - credentials=credentials.AnonymousCredentials(), - ) + client = ModelServiceAsyncClient(credentials=credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. @@ -1567,16 +1410,11 @@ async def test_update_model_field_headers_async(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "model.name=model.name/value", - ) in kw["metadata"] + assert ("x-goog-request-params", "model.name=model.name/value",) in kw["metadata"] def test_update_model_flattened(): - client = ModelServiceClient( - credentials=credentials.AnonymousCredentials(), - ) + client = ModelServiceClient(credentials=credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.update_model), "__call__") as call: @@ -1601,9 +1439,7 @@ def test_update_model_flattened(): def test_update_model_flattened_error(): - client = ModelServiceClient( - credentials=credentials.AnonymousCredentials(), - ) + client = ModelServiceClient(credentials=credentials.AnonymousCredentials(),) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -1617,9 +1453,7 @@ def test_update_model_flattened_error(): @pytest.mark.asyncio async def test_update_model_flattened_async(): - client = ModelServiceAsyncClient( - credentials=credentials.AnonymousCredentials(), - ) + client = ModelServiceAsyncClient(credentials=credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.update_model), "__call__") as call: @@ -1646,9 +1480,7 @@ async def test_update_model_flattened_async(): @pytest.mark.asyncio async def test_update_model_flattened_error_async(): - client = ModelServiceAsyncClient( - credentials=credentials.AnonymousCredentials(), - ) + client = ModelServiceAsyncClient(credentials=credentials.AnonymousCredentials(),) # Attempting to call a method with both a request object and flattened # fields is an error. 
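The *_flattened_error tests that recur throughout this file all pin down the same contract: a method accepts either a request object or flattened keyword arguments, never both. A sketch of the guard they exercise, assuming illustrative names (GetThingRequest, get_thing) rather than the generated API:

import pytest

class GetThingRequest:
    def __init__(self, name=""):
        self.name = name

def get_thing(request=None, *, name=None):
    # Mirror of the generated-client guard: flattened fields are only
    # allowed when no request object was passed in.
    if request is not None and name is not None:
        raise ValueError(
            "If the `request` argument is set, then none of "
            "the individual field arguments should be set."
        )
    request = request or GetThingRequest(name=name)
    return request

# Either style works on its own...
assert get_thing(name="name_value").name == "name_value"
assert get_thing(GetThingRequest(name="n")).name == "n"

# ...but mixing them is rejected, which is what the tests above verify.
with pytest.raises(ValueError):
    get_thing(GetThingRequest(), name="name_value")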
@@ -1664,8 +1496,7 @@ def test_delete_model( transport: str = "grpc", request_type=model_service.DeleteModelRequest ): client = ModelServiceClient( - credentials=credentials.AnonymousCredentials(), - transport=transport, + credentials=credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -1698,8 +1529,7 @@ async def test_delete_model_async( transport: str = "grpc_asyncio", request_type=model_service.DeleteModelRequest ): client = ModelServiceAsyncClient( - credentials=credentials.AnonymousCredentials(), - transport=transport, + credentials=credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -1731,9 +1561,7 @@ async def test_delete_model_async_from_dict(): def test_delete_model_field_headers(): - client = ModelServiceClient( - credentials=credentials.AnonymousCredentials(), - ) + client = ModelServiceClient(credentials=credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. @@ -1753,17 +1581,12 @@ def test_delete_model_field_headers(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "name=name/value", - ) in kw["metadata"] + assert ("x-goog-request-params", "name=name/value",) in kw["metadata"] @pytest.mark.asyncio async def test_delete_model_field_headers_async(): - client = ModelServiceAsyncClient( - credentials=credentials.AnonymousCredentials(), - ) + client = ModelServiceAsyncClient(credentials=credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. @@ -1785,16 +1608,11 @@ async def test_delete_model_field_headers_async(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "name=name/value", - ) in kw["metadata"] + assert ("x-goog-request-params", "name=name/value",) in kw["metadata"] def test_delete_model_flattened(): - client = ModelServiceClient( - credentials=credentials.AnonymousCredentials(), - ) + client = ModelServiceClient(credentials=credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.delete_model), "__call__") as call: @@ -1803,9 +1621,7 @@ def test_delete_model_flattened(): # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - client.delete_model( - name="name_value", - ) + client.delete_model(name="name_value",) # Establish that the underlying call was made with the expected # request object values. @@ -1816,24 +1632,19 @@ def test_delete_model_flattened(): def test_delete_model_flattened_error(): - client = ModelServiceClient( - credentials=credentials.AnonymousCredentials(), - ) + client = ModelServiceClient(credentials=credentials.AnonymousCredentials(),) # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): client.delete_model( - model_service.DeleteModelRequest(), - name="name_value", + model_service.DeleteModelRequest(), name="name_value", ) @pytest.mark.asyncio async def test_delete_model_flattened_async(): - client = ModelServiceAsyncClient( - credentials=credentials.AnonymousCredentials(), - ) + client = ModelServiceAsyncClient(credentials=credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.delete_model), "__call__") as call: @@ -1845,9 +1656,7 @@ async def test_delete_model_flattened_async(): ) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - response = await client.delete_model( - name="name_value", - ) + response = await client.delete_model(name="name_value",) # Establish that the underlying call was made with the expected # request object values. @@ -1859,16 +1668,13 @@ async def test_delete_model_flattened_async(): @pytest.mark.asyncio async def test_delete_model_flattened_error_async(): - client = ModelServiceAsyncClient( - credentials=credentials.AnonymousCredentials(), - ) + client = ModelServiceAsyncClient(credentials=credentials.AnonymousCredentials(),) # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): await client.delete_model( - model_service.DeleteModelRequest(), - name="name_value", + model_service.DeleteModelRequest(), name="name_value", ) @@ -1876,8 +1682,7 @@ def test_export_model( transport: str = "grpc", request_type=model_service.ExportModelRequest ): client = ModelServiceClient( - credentials=credentials.AnonymousCredentials(), - transport=transport, + credentials=credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -1910,8 +1715,7 @@ async def test_export_model_async( transport: str = "grpc_asyncio", request_type=model_service.ExportModelRequest ): client = ModelServiceAsyncClient( - credentials=credentials.AnonymousCredentials(), - transport=transport, + credentials=credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -1943,9 +1747,7 @@ async def test_export_model_async_from_dict(): def test_export_model_field_headers(): - client = ModelServiceClient( - credentials=credentials.AnonymousCredentials(), - ) + client = ModelServiceClient(credentials=credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. @@ -1965,17 +1767,12 @@ def test_export_model_field_headers(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "name=name/value", - ) in kw["metadata"] + assert ("x-goog-request-params", "name=name/value",) in kw["metadata"] @pytest.mark.asyncio async def test_export_model_field_headers_async(): - client = ModelServiceAsyncClient( - credentials=credentials.AnonymousCredentials(), - ) + client = ModelServiceAsyncClient(credentials=credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. @@ -1997,16 +1794,11 @@ async def test_export_model_field_headers_async(): # Establish that the field header was sent. 
_, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "name=name/value", - ) in kw["metadata"] + assert ("x-goog-request-params", "name=name/value",) in kw["metadata"] def test_export_model_flattened(): - client = ModelServiceClient( - credentials=credentials.AnonymousCredentials(), - ) + client = ModelServiceClient(credentials=credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.export_model), "__call__") as call: @@ -2035,9 +1827,7 @@ def test_export_model_flattened(): def test_export_model_flattened_error(): - client = ModelServiceClient( - credentials=credentials.AnonymousCredentials(), - ) + client = ModelServiceClient(credentials=credentials.AnonymousCredentials(),) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -2053,9 +1843,7 @@ def test_export_model_flattened_error(): @pytest.mark.asyncio async def test_export_model_flattened_async(): - client = ModelServiceAsyncClient( - credentials=credentials.AnonymousCredentials(), - ) + client = ModelServiceAsyncClient(credentials=credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.export_model), "__call__") as call: @@ -2088,9 +1876,7 @@ async def test_export_model_flattened_async(): @pytest.mark.asyncio async def test_export_model_flattened_error_async(): - client = ModelServiceAsyncClient( - credentials=credentials.AnonymousCredentials(), - ) + client = ModelServiceAsyncClient(credentials=credentials.AnonymousCredentials(),) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -2108,8 +1894,7 @@ def test_get_model_evaluation( transport: str = "grpc", request_type=model_service.GetModelEvaluationRequest ): client = ModelServiceClient( - credentials=credentials.AnonymousCredentials(), - transport=transport, + credentials=credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -2156,8 +1941,7 @@ async def test_get_model_evaluation_async( request_type=model_service.GetModelEvaluationRequest, ): client = ModelServiceAsyncClient( - credentials=credentials.AnonymousCredentials(), - transport=transport, + credentials=credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -2201,9 +1985,7 @@ async def test_get_model_evaluation_async_from_dict(): def test_get_model_evaluation_field_headers(): - client = ModelServiceClient( - credentials=credentials.AnonymousCredentials(), - ) + client = ModelServiceClient(credentials=credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. @@ -2225,17 +2007,12 @@ def test_get_model_evaluation_field_headers(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "name=name/value", - ) in kw["metadata"] + assert ("x-goog-request-params", "name=name/value",) in kw["metadata"] @pytest.mark.asyncio async def test_get_model_evaluation_field_headers_async(): - client = ModelServiceAsyncClient( - credentials=credentials.AnonymousCredentials(), - ) + client = ModelServiceAsyncClient(credentials=credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. 
Set these to a non-empty value. @@ -2259,16 +2036,11 @@ async def test_get_model_evaluation_field_headers_async(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "name=name/value", - ) in kw["metadata"] + assert ("x-goog-request-params", "name=name/value",) in kw["metadata"] def test_get_model_evaluation_flattened(): - client = ModelServiceClient( - credentials=credentials.AnonymousCredentials(), - ) + client = ModelServiceClient(credentials=credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -2279,9 +2051,7 @@ def test_get_model_evaluation_flattened(): # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - client.get_model_evaluation( - name="name_value", - ) + client.get_model_evaluation(name="name_value",) # Establish that the underlying call was made with the expected # request object values. @@ -2292,24 +2062,19 @@ def test_get_model_evaluation_flattened(): def test_get_model_evaluation_flattened_error(): - client = ModelServiceClient( - credentials=credentials.AnonymousCredentials(), - ) + client = ModelServiceClient(credentials=credentials.AnonymousCredentials(),) # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): client.get_model_evaluation( - model_service.GetModelEvaluationRequest(), - name="name_value", + model_service.GetModelEvaluationRequest(), name="name_value", ) @pytest.mark.asyncio async def test_get_model_evaluation_flattened_async(): - client = ModelServiceAsyncClient( - credentials=credentials.AnonymousCredentials(), - ) + client = ModelServiceAsyncClient(credentials=credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -2323,9 +2088,7 @@ async def test_get_model_evaluation_flattened_async(): ) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - response = await client.get_model_evaluation( - name="name_value", - ) + response = await client.get_model_evaluation(name="name_value",) # Establish that the underlying call was made with the expected # request object values. @@ -2337,16 +2100,13 @@ async def test_get_model_evaluation_flattened_async(): @pytest.mark.asyncio async def test_get_model_evaluation_flattened_error_async(): - client = ModelServiceAsyncClient( - credentials=credentials.AnonymousCredentials(), - ) + client = ModelServiceAsyncClient(credentials=credentials.AnonymousCredentials(),) # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): await client.get_model_evaluation( - model_service.GetModelEvaluationRequest(), - name="name_value", + model_service.GetModelEvaluationRequest(), name="name_value", ) @@ -2354,8 +2114,7 @@ def test_list_model_evaluations( transport: str = "grpc", request_type=model_service.ListModelEvaluationsRequest ): client = ModelServiceClient( - credentials=credentials.AnonymousCredentials(), - transport=transport, + credentials=credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -2396,8 +2155,7 @@ async def test_list_model_evaluations_async( request_type=model_service.ListModelEvaluationsRequest, ): client = ModelServiceAsyncClient( - credentials=credentials.AnonymousCredentials(), - transport=transport, + credentials=credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -2435,9 +2193,7 @@ async def test_list_model_evaluations_async_from_dict(): def test_list_model_evaluations_field_headers(): - client = ModelServiceClient( - credentials=credentials.AnonymousCredentials(), - ) + client = ModelServiceClient(credentials=credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. @@ -2459,17 +2215,12 @@ def test_list_model_evaluations_field_headers(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "parent=parent/value", - ) in kw["metadata"] + assert ("x-goog-request-params", "parent=parent/value",) in kw["metadata"] @pytest.mark.asyncio async def test_list_model_evaluations_field_headers_async(): - client = ModelServiceAsyncClient( - credentials=credentials.AnonymousCredentials(), - ) + client = ModelServiceAsyncClient(credentials=credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. @@ -2493,16 +2244,11 @@ async def test_list_model_evaluations_field_headers_async(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "parent=parent/value", - ) in kw["metadata"] + assert ("x-goog-request-params", "parent=parent/value",) in kw["metadata"] def test_list_model_evaluations_flattened(): - client = ModelServiceClient( - credentials=credentials.AnonymousCredentials(), - ) + client = ModelServiceClient(credentials=credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -2513,9 +2259,7 @@ def test_list_model_evaluations_flattened(): # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - client.list_model_evaluations( - parent="parent_value", - ) + client.list_model_evaluations(parent="parent_value",) # Establish that the underlying call was made with the expected # request object values. @@ -2526,24 +2270,19 @@ def test_list_model_evaluations_flattened(): def test_list_model_evaluations_flattened_error(): - client = ModelServiceClient( - credentials=credentials.AnonymousCredentials(), - ) + client = ModelServiceClient(credentials=credentials.AnonymousCredentials(),) # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): client.list_model_evaluations( - model_service.ListModelEvaluationsRequest(), - parent="parent_value", + model_service.ListModelEvaluationsRequest(), parent="parent_value", ) @pytest.mark.asyncio async def test_list_model_evaluations_flattened_async(): - client = ModelServiceAsyncClient( - credentials=credentials.AnonymousCredentials(), - ) + client = ModelServiceAsyncClient(credentials=credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -2557,9 +2296,7 @@ async def test_list_model_evaluations_flattened_async(): ) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - response = await client.list_model_evaluations( - parent="parent_value", - ) + response = await client.list_model_evaluations(parent="parent_value",) # Establish that the underlying call was made with the expected # request object values. @@ -2571,23 +2308,18 @@ async def test_list_model_evaluations_flattened_async(): @pytest.mark.asyncio async def test_list_model_evaluations_flattened_error_async(): - client = ModelServiceAsyncClient( - credentials=credentials.AnonymousCredentials(), - ) + client = ModelServiceAsyncClient(credentials=credentials.AnonymousCredentials(),) # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): await client.list_model_evaluations( - model_service.ListModelEvaluationsRequest(), - parent="parent_value", + model_service.ListModelEvaluationsRequest(), parent="parent_value", ) def test_list_model_evaluations_pager(): - client = ModelServiceClient( - credentials=credentials.AnonymousCredentials, - ) + client = ModelServiceClient(credentials=credentials.AnonymousCredentials,) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -2604,13 +2336,10 @@ def test_list_model_evaluations_pager(): next_page_token="abc", ), model_service.ListModelEvaluationsResponse( - model_evaluations=[], - next_page_token="def", + model_evaluations=[], next_page_token="def", ), model_service.ListModelEvaluationsResponse( - model_evaluations=[ - model_evaluation.ModelEvaluation(), - ], + model_evaluations=[model_evaluation.ModelEvaluation(),], next_page_token="ghi", ), model_service.ListModelEvaluationsResponse( @@ -2636,9 +2365,7 @@ def test_list_model_evaluations_pager(): def test_list_model_evaluations_pages(): - client = ModelServiceClient( - credentials=credentials.AnonymousCredentials, - ) + client = ModelServiceClient(credentials=credentials.AnonymousCredentials,) # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object( @@ -2655,13 +2382,10 @@ def test_list_model_evaluations_pages(): next_page_token="abc", ), model_service.ListModelEvaluationsResponse( - model_evaluations=[], - next_page_token="def", + model_evaluations=[], next_page_token="def", ), model_service.ListModelEvaluationsResponse( - model_evaluations=[ - model_evaluation.ModelEvaluation(), - ], + model_evaluations=[model_evaluation.ModelEvaluation(),], next_page_token="ghi", ), model_service.ListModelEvaluationsResponse( @@ -2679,9 +2403,7 @@ def test_list_model_evaluations_pages(): @pytest.mark.asyncio async def test_list_model_evaluations_async_pager(): - client = ModelServiceAsyncClient( - credentials=credentials.AnonymousCredentials, - ) + client = ModelServiceAsyncClient(credentials=credentials.AnonymousCredentials,) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -2700,13 +2422,10 @@ async def test_list_model_evaluations_async_pager(): next_page_token="abc", ), model_service.ListModelEvaluationsResponse( - model_evaluations=[], - next_page_token="def", + model_evaluations=[], next_page_token="def", ), model_service.ListModelEvaluationsResponse( - model_evaluations=[ - model_evaluation.ModelEvaluation(), - ], + model_evaluations=[model_evaluation.ModelEvaluation(),], next_page_token="ghi", ), model_service.ListModelEvaluationsResponse( @@ -2717,9 +2436,7 @@ async def test_list_model_evaluations_async_pager(): ), RuntimeError, ) - async_pager = await client.list_model_evaluations( - request={}, - ) + async_pager = await client.list_model_evaluations(request={},) assert async_pager.next_page_token == "abc" responses = [] async for response in async_pager: @@ -2731,9 +2448,7 @@ async def test_list_model_evaluations_async_pager(): @pytest.mark.asyncio async def test_list_model_evaluations_async_pages(): - client = ModelServiceAsyncClient( - credentials=credentials.AnonymousCredentials, - ) + client = ModelServiceAsyncClient(credentials=credentials.AnonymousCredentials,) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -2752,13 +2467,10 @@ async def test_list_model_evaluations_async_pages(): next_page_token="abc", ), model_service.ListModelEvaluationsResponse( - model_evaluations=[], - next_page_token="def", + model_evaluations=[], next_page_token="def", ), model_service.ListModelEvaluationsResponse( - model_evaluations=[ - model_evaluation.ModelEvaluation(), - ], + model_evaluations=[model_evaluation.ModelEvaluation(),], next_page_token="ghi", ), model_service.ListModelEvaluationsResponse( @@ -2780,8 +2492,7 @@ def test_get_model_evaluation_slice( transport: str = "grpc", request_type=model_service.GetModelEvaluationSliceRequest ): client = ModelServiceClient( - credentials=credentials.AnonymousCredentials(), - transport=transport, + credentials=credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -2794,8 +2505,7 @@ def test_get_model_evaluation_slice( ) as call: # Designate an appropriate return value for the call. 
call.return_value = model_evaluation_slice.ModelEvaluationSlice( - name="name_value", - metrics_schema_uri="metrics_schema_uri_value", + name="name_value", metrics_schema_uri="metrics_schema_uri_value", ) response = client.get_model_evaluation_slice(request) @@ -2825,8 +2535,7 @@ async def test_get_model_evaluation_slice_async( request_type=model_service.GetModelEvaluationSliceRequest, ): client = ModelServiceAsyncClient( - credentials=credentials.AnonymousCredentials(), - transport=transport, + credentials=credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -2840,8 +2549,7 @@ async def test_get_model_evaluation_slice_async( # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( model_evaluation_slice.ModelEvaluationSlice( - name="name_value", - metrics_schema_uri="metrics_schema_uri_value", + name="name_value", metrics_schema_uri="metrics_schema_uri_value", ) ) @@ -2867,9 +2575,7 @@ async def test_get_model_evaluation_slice_async_from_dict(): def test_get_model_evaluation_slice_field_headers(): - client = ModelServiceClient( - credentials=credentials.AnonymousCredentials(), - ) + client = ModelServiceClient(credentials=credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. @@ -2891,17 +2597,12 @@ def test_get_model_evaluation_slice_field_headers(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "name=name/value", - ) in kw["metadata"] + assert ("x-goog-request-params", "name=name/value",) in kw["metadata"] @pytest.mark.asyncio async def test_get_model_evaluation_slice_field_headers_async(): - client = ModelServiceAsyncClient( - credentials=credentials.AnonymousCredentials(), - ) + client = ModelServiceAsyncClient(credentials=credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. @@ -2925,16 +2626,11 @@ async def test_get_model_evaluation_slice_field_headers_async(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "name=name/value", - ) in kw["metadata"] + assert ("x-goog-request-params", "name=name/value",) in kw["metadata"] def test_get_model_evaluation_slice_flattened(): - client = ModelServiceClient( - credentials=credentials.AnonymousCredentials(), - ) + client = ModelServiceClient(credentials=credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -2945,9 +2641,7 @@ def test_get_model_evaluation_slice_flattened(): # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - client.get_model_evaluation_slice( - name="name_value", - ) + client.get_model_evaluation_slice(name="name_value",) # Establish that the underlying call was made with the expected # request object values. @@ -2958,24 +2652,19 @@ def test_get_model_evaluation_slice_flattened(): def test_get_model_evaluation_slice_flattened_error(): - client = ModelServiceClient( - credentials=credentials.AnonymousCredentials(), - ) + client = ModelServiceClient(credentials=credentials.AnonymousCredentials(),) # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): client.get_model_evaluation_slice( - model_service.GetModelEvaluationSliceRequest(), - name="name_value", + model_service.GetModelEvaluationSliceRequest(), name="name_value", ) @pytest.mark.asyncio async def test_get_model_evaluation_slice_flattened_async(): - client = ModelServiceAsyncClient( - credentials=credentials.AnonymousCredentials(), - ) + client = ModelServiceAsyncClient(credentials=credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -2989,9 +2678,7 @@ async def test_get_model_evaluation_slice_flattened_async(): ) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - response = await client.get_model_evaluation_slice( - name="name_value", - ) + response = await client.get_model_evaluation_slice(name="name_value",) # Establish that the underlying call was made with the expected # request object values. @@ -3003,16 +2690,13 @@ async def test_get_model_evaluation_slice_flattened_async(): @pytest.mark.asyncio async def test_get_model_evaluation_slice_flattened_error_async(): - client = ModelServiceAsyncClient( - credentials=credentials.AnonymousCredentials(), - ) + client = ModelServiceAsyncClient(credentials=credentials.AnonymousCredentials(),) # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): await client.get_model_evaluation_slice( - model_service.GetModelEvaluationSliceRequest(), - name="name_value", + model_service.GetModelEvaluationSliceRequest(), name="name_value", ) @@ -3020,8 +2704,7 @@ def test_list_model_evaluation_slices( transport: str = "grpc", request_type=model_service.ListModelEvaluationSlicesRequest ): client = ModelServiceClient( - credentials=credentials.AnonymousCredentials(), - transport=transport, + credentials=credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -3062,8 +2745,7 @@ async def test_list_model_evaluation_slices_async( request_type=model_service.ListModelEvaluationSlicesRequest, ): client = ModelServiceAsyncClient( - credentials=credentials.AnonymousCredentials(), - transport=transport, + credentials=credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -3101,9 +2783,7 @@ async def test_list_model_evaluation_slices_async_from_dict(): def test_list_model_evaluation_slices_field_headers(): - client = ModelServiceClient( - credentials=credentials.AnonymousCredentials(), - ) + client = ModelServiceClient(credentials=credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. @@ -3125,17 +2805,12 @@ def test_list_model_evaluation_slices_field_headers(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "parent=parent/value", - ) in kw["metadata"] + assert ("x-goog-request-params", "parent=parent/value",) in kw["metadata"] @pytest.mark.asyncio async def test_list_model_evaluation_slices_field_headers_async(): - client = ModelServiceAsyncClient( - credentials=credentials.AnonymousCredentials(), - ) + client = ModelServiceAsyncClient(credentials=credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. 
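The field-header assertions in the hunks above and below all reduce to one membership check: the tuple ("x-goog-request-params", ...) must appear in the metadata kwarg recorded by the mocked stub. A minimal, self-contained sketch of that same pattern, using only unittest.mock rather than a generated client (RoutedStub and call_with_routing_header are hypothetical stand-ins, not the real API surface):

from unittest import mock


class RoutedStub:
    """Hypothetical stand-in for a generated gRPC method wrapper."""

    def send(self, request, metadata=()):
        raise NotImplementedError  # replaced by the mock in the test


def call_with_routing_header(stub, name):
    # Generated clients append an x-goog-request-params entry so the
    # backend can route the request by resource name.
    metadata = (("x-goog-request-params", f"name={name}"),)
    return stub.send(request={"name": name}, metadata=metadata)


def test_routing_header_is_sent():
    stub = RoutedStub()
    with mock.patch.object(stub, "send") as send:
        call_with_routing_header(stub, "name/value")
        # Same shape as the assertions in these hunks: unpack the
        # recorded call and check the header tuple is in the metadata.
        _, kw = send.call_args
        assert ("x-goog-request-params", "name=name/value") in kw["metadata"]


test_routing_header_is_sent()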
@@ -3159,16 +2834,11 @@ async def test_list_model_evaluation_slices_field_headers_async(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "parent=parent/value", - ) in kw["metadata"] + assert ("x-goog-request-params", "parent=parent/value",) in kw["metadata"] def test_list_model_evaluation_slices_flattened(): - client = ModelServiceClient( - credentials=credentials.AnonymousCredentials(), - ) + client = ModelServiceClient(credentials=credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -3179,9 +2849,7 @@ def test_list_model_evaluation_slices_flattened(): # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - client.list_model_evaluation_slices( - parent="parent_value", - ) + client.list_model_evaluation_slices(parent="parent_value",) # Establish that the underlying call was made with the expected # request object values. @@ -3192,24 +2860,19 @@ def test_list_model_evaluation_slices_flattened(): def test_list_model_evaluation_slices_flattened_error(): - client = ModelServiceClient( - credentials=credentials.AnonymousCredentials(), - ) + client = ModelServiceClient(credentials=credentials.AnonymousCredentials(),) # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): client.list_model_evaluation_slices( - model_service.ListModelEvaluationSlicesRequest(), - parent="parent_value", + model_service.ListModelEvaluationSlicesRequest(), parent="parent_value", ) @pytest.mark.asyncio async def test_list_model_evaluation_slices_flattened_async(): - client = ModelServiceAsyncClient( - credentials=credentials.AnonymousCredentials(), - ) + client = ModelServiceAsyncClient(credentials=credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -3223,9 +2886,7 @@ async def test_list_model_evaluation_slices_flattened_async(): ) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - response = await client.list_model_evaluation_slices( - parent="parent_value", - ) + response = await client.list_model_evaluation_slices(parent="parent_value",) # Establish that the underlying call was made with the expected # request object values. @@ -3237,23 +2898,18 @@ async def test_list_model_evaluation_slices_flattened_async(): @pytest.mark.asyncio async def test_list_model_evaluation_slices_flattened_error_async(): - client = ModelServiceAsyncClient( - credentials=credentials.AnonymousCredentials(), - ) + client = ModelServiceAsyncClient(credentials=credentials.AnonymousCredentials(),) # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): await client.list_model_evaluation_slices( - model_service.ListModelEvaluationSlicesRequest(), - parent="parent_value", + model_service.ListModelEvaluationSlicesRequest(), parent="parent_value", ) def test_list_model_evaluation_slices_pager(): - client = ModelServiceClient( - credentials=credentials.AnonymousCredentials, - ) + client = ModelServiceClient(credentials=credentials.AnonymousCredentials,) # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object( @@ -3270,8 +2926,7 @@ def test_list_model_evaluation_slices_pager(): next_page_token="abc", ), model_service.ListModelEvaluationSlicesResponse( - model_evaluation_slices=[], - next_page_token="def", + model_evaluation_slices=[], next_page_token="def", ), model_service.ListModelEvaluationSlicesResponse( model_evaluation_slices=[ @@ -3304,9 +2959,7 @@ def test_list_model_evaluation_slices_pager(): def test_list_model_evaluation_slices_pages(): - client = ModelServiceClient( - credentials=credentials.AnonymousCredentials, - ) + client = ModelServiceClient(credentials=credentials.AnonymousCredentials,) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -3323,8 +2976,7 @@ def test_list_model_evaluation_slices_pages(): next_page_token="abc", ), model_service.ListModelEvaluationSlicesResponse( - model_evaluation_slices=[], - next_page_token="def", + model_evaluation_slices=[], next_page_token="def", ), model_service.ListModelEvaluationSlicesResponse( model_evaluation_slices=[ @@ -3347,9 +2999,7 @@ def test_list_model_evaluation_slices_pages(): @pytest.mark.asyncio async def test_list_model_evaluation_slices_async_pager(): - client = ModelServiceAsyncClient( - credentials=credentials.AnonymousCredentials, - ) + client = ModelServiceAsyncClient(credentials=credentials.AnonymousCredentials,) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -3368,8 +3018,7 @@ async def test_list_model_evaluation_slices_async_pager(): next_page_token="abc", ), model_service.ListModelEvaluationSlicesResponse( - model_evaluation_slices=[], - next_page_token="def", + model_evaluation_slices=[], next_page_token="def", ), model_service.ListModelEvaluationSlicesResponse( model_evaluation_slices=[ @@ -3385,9 +3034,7 @@ async def test_list_model_evaluation_slices_async_pager(): ), RuntimeError, ) - async_pager = await client.list_model_evaluation_slices( - request={}, - ) + async_pager = await client.list_model_evaluation_slices(request={},) assert async_pager.next_page_token == "abc" responses = [] async for response in async_pager: @@ -3402,9 +3049,7 @@ async def test_list_model_evaluation_slices_async_pager(): @pytest.mark.asyncio async def test_list_model_evaluation_slices_async_pages(): - client = ModelServiceAsyncClient( - credentials=credentials.AnonymousCredentials, - ) + client = ModelServiceAsyncClient(credentials=credentials.AnonymousCredentials,) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -3423,8 +3068,7 @@ async def test_list_model_evaluation_slices_async_pages(): next_page_token="abc", ), model_service.ListModelEvaluationSlicesResponse( - model_evaluation_slices=[], - next_page_token="def", + model_evaluation_slices=[], next_page_token="def", ), model_service.ListModelEvaluationSlicesResponse( model_evaluation_slices=[ @@ -3456,8 +3100,7 @@ def test_credentials_transport_error(): ) with pytest.raises(ValueError): client = ModelServiceClient( - credentials=credentials.AnonymousCredentials(), - transport=transport, + credentials=credentials.AnonymousCredentials(), transport=transport, ) # It is an error to provide a credentials file and a transport instance. 
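The pager tests reformatted above all follow one recipe: prime the mocked call with side_effect so each invocation yields the next page, with a trailing RuntimeError as a sentinel the pager must never reach if it stops at the empty next_page_token. A reduced sketch of that recipe against a hand-rolled pager (ListResponse and iterate_pages are hypothetical analogues, not the generated types):

from dataclasses import dataclass, field
from typing import List
from unittest import mock


@dataclass
class ListResponse:
    """Hypothetical analogue of a List*Response proto message."""

    items: List[str] = field(default_factory=list)
    next_page_token: str = ""


def iterate_pages(method):
    # Keep calling the RPC until a page comes back without a token,
    # mirroring what the generated pagers do under the hood.
    token = None
    while True:
        page = method(page_token=token)
        yield page
        if not page.next_page_token:
            return
        token = page.next_page_token


rpc = mock.Mock()
rpc.side_effect = (
    ListResponse(items=["a", "b"], next_page_token="abc"),
    ListResponse(items=[], next_page_token="def"),
    ListResponse(items=["c"], next_page_token=""),
    RuntimeError,  # sentinel: a correct pager never triggers this
)

pages = list(iterate_pages(rpc))
assert [len(p.items) for p in pages] == [2, 0, 1]
assert rpc.call_count == 3  # the RuntimeError was never raised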
@@ -3476,8 +3119,7 @@ def test_credentials_transport_error(): ) with pytest.raises(ValueError): client = ModelServiceClient( - client_options={"scopes": ["1", "2"]}, - transport=transport, + client_options={"scopes": ["1", "2"]}, transport=transport, ) @@ -3519,13 +3161,8 @@ def test_transport_adc(transport_class): def test_transport_grpc_default(): # A client should use the gRPC transport by default. - client = ModelServiceClient( - credentials=credentials.AnonymousCredentials(), - ) - assert isinstance( - client.transport, - transports.ModelServiceGrpcTransport, - ) + client = ModelServiceClient(credentials=credentials.AnonymousCredentials(),) + assert isinstance(client.transport, transports.ModelServiceGrpcTransport,) def test_model_service_base_transport_error(): @@ -3581,8 +3218,7 @@ def test_model_service_base_transport_with_credentials_file(): Transport.return_value = None load_creds.return_value = (credentials.AnonymousCredentials(), None) transport = transports.ModelServiceTransport( - credentials_file="credentials.json", - quota_project_id="octopus", + credentials_file="credentials.json", quota_project_id="octopus", ) load_creds.assert_called_once_with( "credentials.json", @@ -3652,8 +3288,7 @@ def test_model_service_grpc_transport_channel(): # Check that channel is used if provided. transport = transports.ModelServiceGrpcTransport( - host="squid.clam.whelk", - channel=channel, + host="squid.clam.whelk", channel=channel, ) assert transport.grpc_channel == channel assert transport._host == "squid.clam.whelk:443" @@ -3665,8 +3300,7 @@ def test_model_service_grpc_asyncio_transport_channel(): # Check that channel is used if provided. transport = transports.ModelServiceGrpcAsyncIOTransport( - host="squid.clam.whelk", - channel=channel, + host="squid.clam.whelk", channel=channel, ) assert transport.grpc_channel == channel assert transport._host == "squid.clam.whelk:443" @@ -3755,16 +3389,12 @@ def test_model_service_transport_channel_mtls_with_adc(transport_class): def test_model_service_grpc_lro_client(): client = ModelServiceClient( - credentials=credentials.AnonymousCredentials(), - transport="grpc", + credentials=credentials.AnonymousCredentials(), transport="grpc", ) transport = client.transport # Ensure that we have a api-core operations client. - assert isinstance( - transport.operations_client, - operations_v1.OperationsClient, - ) + assert isinstance(transport.operations_client, operations_v1.OperationsClient,) # Ensure that subsequent calls to the property send the exact same object. assert transport.operations_client is transport.operations_client @@ -3772,16 +3402,12 @@ def test_model_service_grpc_lro_client(): def test_model_service_grpc_lro_async_client(): client = ModelServiceAsyncClient( - credentials=credentials.AnonymousCredentials(), - transport="grpc_asyncio", + credentials=credentials.AnonymousCredentials(), transport="grpc_asyncio", ) transport = client.transport # Ensure that we have a api-core operations client. - assert isinstance( - transport.operations_client, - operations_v1.OperationsAsyncClient, - ) + assert isinstance(transport.operations_client, operations_v1.OperationsAsyncClient,) # Ensure that subsequent calls to the property send the exact same object. 
assert transport.operations_client is transport.operations_client @@ -3793,9 +3419,7 @@ def test_endpoint_path(): endpoint = "whelk" expected = "projects/{project}/locations/{location}/endpoints/{endpoint}".format( - project=project, - location=location, - endpoint=endpoint, + project=project, location=location, endpoint=endpoint, ) actual = ModelServiceClient.endpoint_path(project, location, endpoint) assert expected == actual @@ -3820,9 +3444,7 @@ def test_model_path(): model = "winkle" expected = "projects/{project}/locations/{location}/models/{model}".format( - project=project, - location=location, - model=model, + project=project, location=location, model=model, ) actual = ModelServiceClient.model_path(project, location, model) assert expected == actual @@ -3848,10 +3470,7 @@ def test_model_evaluation_path(): evaluation = "octopus" expected = "projects/{project}/locations/{location}/models/{model}/evaluations/{evaluation}".format( - project=project, - location=location, - model=model, - evaluation=evaluation, + project=project, location=location, model=model, evaluation=evaluation, ) actual = ModelServiceClient.model_evaluation_path( project, location, model, evaluation @@ -3914,9 +3533,7 @@ def test_training_pipeline_path(): training_pipeline = "winkle" expected = "projects/{project}/locations/{location}/trainingPipelines/{training_pipeline}".format( - project=project, - location=location, - training_pipeline=training_pipeline, + project=project, location=location, training_pipeline=training_pipeline, ) actual = ModelServiceClient.training_pipeline_path( project, location, training_pipeline @@ -3961,9 +3578,7 @@ def test_parse_common_billing_account_path(): def test_common_folder_path(): folder = "whelk" - expected = "folders/{folder}".format( - folder=folder, - ) + expected = "folders/{folder}".format(folder=folder,) actual = ModelServiceClient.common_folder_path(folder) assert expected == actual @@ -3982,9 +3597,7 @@ def test_parse_common_folder_path(): def test_common_organization_path(): organization = "oyster" - expected = "organizations/{organization}".format( - organization=organization, - ) + expected = "organizations/{organization}".format(organization=organization,) actual = ModelServiceClient.common_organization_path(organization) assert expected == actual @@ -4003,9 +3616,7 @@ def test_parse_common_organization_path(): def test_common_project_path(): project = "cuttlefish" - expected = "projects/{project}".format( - project=project, - ) + expected = "projects/{project}".format(project=project,) actual = ModelServiceClient.common_project_path(project) assert expected == actual @@ -4026,8 +3637,7 @@ def test_common_location_path(): location = "nautilus" expected = "projects/{project}/locations/{location}".format( - project=project, - location=location, + project=project, location=location, ) actual = ModelServiceClient.common_location_path(project, location) assert expected == actual @@ -4052,8 +3662,7 @@ def test_client_withDEFAULT_CLIENT_INFO(): transports.ModelServiceTransport, "_prep_wrapped_messages" ) as prep: client = ModelServiceClient( - credentials=credentials.AnonymousCredentials(), - client_info=client_info, + credentials=credentials.AnonymousCredentials(), client_info=client_info, ) prep.assert_called_once_with(client_info) @@ -4062,7 +3671,6 @@ def test_client_withDEFAULT_CLIENT_INFO(): ) as prep: transport_class = ModelServiceClient.get_transport_class() transport = transport_class( - credentials=credentials.AnonymousCredentials(), - client_info=client_info, 
+ credentials=credentials.AnonymousCredentials(), client_info=client_info, ) prep.assert_called_once_with(client_info) diff --git a/tests/unit/gapic/aiplatform_v1beta1/test_pipeline_service.py b/tests/unit/gapic/aiplatform_v1beta1/test_pipeline_service.py index 97e4132173..ada82b91c0 100644 --- a/tests/unit/gapic/aiplatform_v1beta1/test_pipeline_service.py +++ b/tests/unit/gapic/aiplatform_v1beta1/test_pipeline_service.py @@ -408,9 +408,7 @@ def test_pipeline_service_client_client_options_scopes( client_class, transport_class, transport_name ): # Check the case scopes are provided. - options = client_options.ClientOptions( - scopes=["1", "2"], - ) + options = client_options.ClientOptions(scopes=["1", "2"],) with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None client = client_class(client_options=options) @@ -478,8 +476,7 @@ def test_create_training_pipeline( transport: str = "grpc", request_type=pipeline_service.CreateTrainingPipelineRequest ): client = PipelineServiceClient( - credentials=credentials.AnonymousCredentials(), - transport=transport, + credentials=credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -529,8 +526,7 @@ async def test_create_training_pipeline_async( request_type=pipeline_service.CreateTrainingPipelineRequest, ): client = PipelineServiceAsyncClient( - credentials=credentials.AnonymousCredentials(), - transport=transport, + credentials=credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -577,9 +573,7 @@ async def test_create_training_pipeline_async_from_dict(): def test_create_training_pipeline_field_headers(): - client = PipelineServiceClient( - credentials=credentials.AnonymousCredentials(), - ) + client = PipelineServiceClient(credentials=credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. @@ -601,17 +595,12 @@ def test_create_training_pipeline_field_headers(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "parent=parent/value", - ) in kw["metadata"] + assert ("x-goog-request-params", "parent=parent/value",) in kw["metadata"] @pytest.mark.asyncio async def test_create_training_pipeline_field_headers_async(): - client = PipelineServiceAsyncClient( - credentials=credentials.AnonymousCredentials(), - ) + client = PipelineServiceAsyncClient(credentials=credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. @@ -635,16 +624,11 @@ async def test_create_training_pipeline_field_headers_async(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "parent=parent/value", - ) in kw["metadata"] + assert ("x-goog-request-params", "parent=parent/value",) in kw["metadata"] def test_create_training_pipeline_flattened(): - client = PipelineServiceClient( - credentials=credentials.AnonymousCredentials(), - ) + client = PipelineServiceClient(credentials=credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object( @@ -673,9 +657,7 @@ def test_create_training_pipeline_flattened(): def test_create_training_pipeline_flattened_error(): - client = PipelineServiceClient( - credentials=credentials.AnonymousCredentials(), - ) + client = PipelineServiceClient(credentials=credentials.AnonymousCredentials(),) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -689,9 +671,7 @@ def test_create_training_pipeline_flattened_error(): @pytest.mark.asyncio async def test_create_training_pipeline_flattened_async(): - client = PipelineServiceAsyncClient( - credentials=credentials.AnonymousCredentials(), - ) + client = PipelineServiceAsyncClient(credentials=credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -724,9 +704,7 @@ async def test_create_training_pipeline_flattened_async(): @pytest.mark.asyncio async def test_create_training_pipeline_flattened_error_async(): - client = PipelineServiceAsyncClient( - credentials=credentials.AnonymousCredentials(), - ) + client = PipelineServiceAsyncClient(credentials=credentials.AnonymousCredentials(),) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -742,8 +720,7 @@ def test_get_training_pipeline( transport: str = "grpc", request_type=pipeline_service.GetTrainingPipelineRequest ): client = PipelineServiceClient( - credentials=credentials.AnonymousCredentials(), - transport=transport, + credentials=credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -793,8 +770,7 @@ async def test_get_training_pipeline_async( request_type=pipeline_service.GetTrainingPipelineRequest, ): client = PipelineServiceAsyncClient( - credentials=credentials.AnonymousCredentials(), - transport=transport, + credentials=credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -841,9 +817,7 @@ async def test_get_training_pipeline_async_from_dict(): def test_get_training_pipeline_field_headers(): - client = PipelineServiceClient( - credentials=credentials.AnonymousCredentials(), - ) + client = PipelineServiceClient(credentials=credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. @@ -865,17 +839,12 @@ def test_get_training_pipeline_field_headers(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "name=name/value", - ) in kw["metadata"] + assert ("x-goog-request-params", "name=name/value",) in kw["metadata"] @pytest.mark.asyncio async def test_get_training_pipeline_field_headers_async(): - client = PipelineServiceAsyncClient( - credentials=credentials.AnonymousCredentials(), - ) + client = PipelineServiceAsyncClient(credentials=credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. @@ -899,16 +868,11 @@ async def test_get_training_pipeline_field_headers_async(): # Establish that the field header was sent. 
_, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "name=name/value", - ) in kw["metadata"] + assert ("x-goog-request-params", "name=name/value",) in kw["metadata"] def test_get_training_pipeline_flattened(): - client = PipelineServiceClient( - credentials=credentials.AnonymousCredentials(), - ) + client = PipelineServiceClient(credentials=credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -919,9 +883,7 @@ def test_get_training_pipeline_flattened(): # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - client.get_training_pipeline( - name="name_value", - ) + client.get_training_pipeline(name="name_value",) # Establish that the underlying call was made with the expected # request object values. @@ -932,24 +894,19 @@ def test_get_training_pipeline_flattened(): def test_get_training_pipeline_flattened_error(): - client = PipelineServiceClient( - credentials=credentials.AnonymousCredentials(), - ) + client = PipelineServiceClient(credentials=credentials.AnonymousCredentials(),) # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): client.get_training_pipeline( - pipeline_service.GetTrainingPipelineRequest(), - name="name_value", + pipeline_service.GetTrainingPipelineRequest(), name="name_value", ) @pytest.mark.asyncio async def test_get_training_pipeline_flattened_async(): - client = PipelineServiceAsyncClient( - credentials=credentials.AnonymousCredentials(), - ) + client = PipelineServiceAsyncClient(credentials=credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -963,9 +920,7 @@ async def test_get_training_pipeline_flattened_async(): ) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - response = await client.get_training_pipeline( - name="name_value", - ) + response = await client.get_training_pipeline(name="name_value",) # Establish that the underlying call was made with the expected # request object values. @@ -977,16 +932,13 @@ async def test_get_training_pipeline_flattened_async(): @pytest.mark.asyncio async def test_get_training_pipeline_flattened_error_async(): - client = PipelineServiceAsyncClient( - credentials=credentials.AnonymousCredentials(), - ) + client = PipelineServiceAsyncClient(credentials=credentials.AnonymousCredentials(),) # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): await client.get_training_pipeline( - pipeline_service.GetTrainingPipelineRequest(), - name="name_value", + pipeline_service.GetTrainingPipelineRequest(), name="name_value", ) @@ -994,8 +946,7 @@ def test_list_training_pipelines( transport: str = "grpc", request_type=pipeline_service.ListTrainingPipelinesRequest ): client = PipelineServiceClient( - credentials=credentials.AnonymousCredentials(), - transport=transport, + credentials=credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -1036,8 +987,7 @@ async def test_list_training_pipelines_async( request_type=pipeline_service.ListTrainingPipelinesRequest, ): client = PipelineServiceAsyncClient( - credentials=credentials.AnonymousCredentials(), - transport=transport, + credentials=credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -1075,9 +1025,7 @@ async def test_list_training_pipelines_async_from_dict(): def test_list_training_pipelines_field_headers(): - client = PipelineServiceClient( - credentials=credentials.AnonymousCredentials(), - ) + client = PipelineServiceClient(credentials=credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. @@ -1099,17 +1047,12 @@ def test_list_training_pipelines_field_headers(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "parent=parent/value", - ) in kw["metadata"] + assert ("x-goog-request-params", "parent=parent/value",) in kw["metadata"] @pytest.mark.asyncio async def test_list_training_pipelines_field_headers_async(): - client = PipelineServiceAsyncClient( - credentials=credentials.AnonymousCredentials(), - ) + client = PipelineServiceAsyncClient(credentials=credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. @@ -1133,16 +1076,11 @@ async def test_list_training_pipelines_field_headers_async(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "parent=parent/value", - ) in kw["metadata"] + assert ("x-goog-request-params", "parent=parent/value",) in kw["metadata"] def test_list_training_pipelines_flattened(): - client = PipelineServiceClient( - credentials=credentials.AnonymousCredentials(), - ) + client = PipelineServiceClient(credentials=credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -1153,9 +1091,7 @@ def test_list_training_pipelines_flattened(): # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - client.list_training_pipelines( - parent="parent_value", - ) + client.list_training_pipelines(parent="parent_value",) # Establish that the underlying call was made with the expected # request object values. @@ -1166,24 +1102,19 @@ def test_list_training_pipelines_flattened(): def test_list_training_pipelines_flattened_error(): - client = PipelineServiceClient( - credentials=credentials.AnonymousCredentials(), - ) + client = PipelineServiceClient(credentials=credentials.AnonymousCredentials(),) # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): client.list_training_pipelines( - pipeline_service.ListTrainingPipelinesRequest(), - parent="parent_value", + pipeline_service.ListTrainingPipelinesRequest(), parent="parent_value", ) @pytest.mark.asyncio async def test_list_training_pipelines_flattened_async(): - client = PipelineServiceAsyncClient( - credentials=credentials.AnonymousCredentials(), - ) + client = PipelineServiceAsyncClient(credentials=credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -1197,9 +1128,7 @@ async def test_list_training_pipelines_flattened_async(): ) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - response = await client.list_training_pipelines( - parent="parent_value", - ) + response = await client.list_training_pipelines(parent="parent_value",) # Establish that the underlying call was made with the expected # request object values. @@ -1211,23 +1140,18 @@ async def test_list_training_pipelines_flattened_async(): @pytest.mark.asyncio async def test_list_training_pipelines_flattened_error_async(): - client = PipelineServiceAsyncClient( - credentials=credentials.AnonymousCredentials(), - ) + client = PipelineServiceAsyncClient(credentials=credentials.AnonymousCredentials(),) # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): await client.list_training_pipelines( - pipeline_service.ListTrainingPipelinesRequest(), - parent="parent_value", + pipeline_service.ListTrainingPipelinesRequest(), parent="parent_value", ) def test_list_training_pipelines_pager(): - client = PipelineServiceClient( - credentials=credentials.AnonymousCredentials, - ) + client = PipelineServiceClient(credentials=credentials.AnonymousCredentials,) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -1244,13 +1168,10 @@ def test_list_training_pipelines_pager(): next_page_token="abc", ), pipeline_service.ListTrainingPipelinesResponse( - training_pipelines=[], - next_page_token="def", + training_pipelines=[], next_page_token="def", ), pipeline_service.ListTrainingPipelinesResponse( - training_pipelines=[ - training_pipeline.TrainingPipeline(), - ], + training_pipelines=[training_pipeline.TrainingPipeline(),], next_page_token="ghi", ), pipeline_service.ListTrainingPipelinesResponse( @@ -1276,9 +1197,7 @@ def test_list_training_pipelines_pager(): def test_list_training_pipelines_pages(): - client = PipelineServiceClient( - credentials=credentials.AnonymousCredentials, - ) + client = PipelineServiceClient(credentials=credentials.AnonymousCredentials,) # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object( @@ -1295,13 +1214,10 @@ def test_list_training_pipelines_pages(): next_page_token="abc", ), pipeline_service.ListTrainingPipelinesResponse( - training_pipelines=[], - next_page_token="def", + training_pipelines=[], next_page_token="def", ), pipeline_service.ListTrainingPipelinesResponse( - training_pipelines=[ - training_pipeline.TrainingPipeline(), - ], + training_pipelines=[training_pipeline.TrainingPipeline(),], next_page_token="ghi", ), pipeline_service.ListTrainingPipelinesResponse( @@ -1319,9 +1235,7 @@ def test_list_training_pipelines_pages(): @pytest.mark.asyncio async def test_list_training_pipelines_async_pager(): - client = PipelineServiceAsyncClient( - credentials=credentials.AnonymousCredentials, - ) + client = PipelineServiceAsyncClient(credentials=credentials.AnonymousCredentials,) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -1340,13 +1254,10 @@ async def test_list_training_pipelines_async_pager(): next_page_token="abc", ), pipeline_service.ListTrainingPipelinesResponse( - training_pipelines=[], - next_page_token="def", + training_pipelines=[], next_page_token="def", ), pipeline_service.ListTrainingPipelinesResponse( - training_pipelines=[ - training_pipeline.TrainingPipeline(), - ], + training_pipelines=[training_pipeline.TrainingPipeline(),], next_page_token="ghi", ), pipeline_service.ListTrainingPipelinesResponse( @@ -1357,9 +1268,7 @@ async def test_list_training_pipelines_async_pager(): ), RuntimeError, ) - async_pager = await client.list_training_pipelines( - request={}, - ) + async_pager = await client.list_training_pipelines(request={},) assert async_pager.next_page_token == "abc" responses = [] async for response in async_pager: @@ -1371,9 +1280,7 @@ async def test_list_training_pipelines_async_pager(): @pytest.mark.asyncio async def test_list_training_pipelines_async_pages(): - client = PipelineServiceAsyncClient( - credentials=credentials.AnonymousCredentials, - ) + client = PipelineServiceAsyncClient(credentials=credentials.AnonymousCredentials,) # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object( @@ -1392,13 +1299,10 @@ async def test_list_training_pipelines_async_pages(): next_page_token="abc", ), pipeline_service.ListTrainingPipelinesResponse( - training_pipelines=[], - next_page_token="def", + training_pipelines=[], next_page_token="def", ), pipeline_service.ListTrainingPipelinesResponse( - training_pipelines=[ - training_pipeline.TrainingPipeline(), - ], + training_pipelines=[training_pipeline.TrainingPipeline(),], next_page_token="ghi", ), pipeline_service.ListTrainingPipelinesResponse( @@ -1420,8 +1324,7 @@ def test_delete_training_pipeline( transport: str = "grpc", request_type=pipeline_service.DeleteTrainingPipelineRequest ): client = PipelineServiceClient( - credentials=credentials.AnonymousCredentials(), - transport=transport, + credentials=credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -1457,8 +1360,7 @@ async def test_delete_training_pipeline_async( request_type=pipeline_service.DeleteTrainingPipelineRequest, ): client = PipelineServiceAsyncClient( - credentials=credentials.AnonymousCredentials(), - transport=transport, + credentials=credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -1492,9 +1394,7 @@ async def test_delete_training_pipeline_async_from_dict(): def test_delete_training_pipeline_field_headers(): - client = PipelineServiceClient( - credentials=credentials.AnonymousCredentials(), - ) + client = PipelineServiceClient(credentials=credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. @@ -1516,17 +1416,12 @@ def test_delete_training_pipeline_field_headers(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "name=name/value", - ) in kw["metadata"] + assert ("x-goog-request-params", "name=name/value",) in kw["metadata"] @pytest.mark.asyncio async def test_delete_training_pipeline_field_headers_async(): - client = PipelineServiceAsyncClient( - credentials=credentials.AnonymousCredentials(), - ) + client = PipelineServiceAsyncClient(credentials=credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. @@ -1550,16 +1445,11 @@ async def test_delete_training_pipeline_field_headers_async(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "name=name/value", - ) in kw["metadata"] + assert ("x-goog-request-params", "name=name/value",) in kw["metadata"] def test_delete_training_pipeline_flattened(): - client = PipelineServiceClient( - credentials=credentials.AnonymousCredentials(), - ) + client = PipelineServiceClient(credentials=credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -1570,9 +1460,7 @@ def test_delete_training_pipeline_flattened(): # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - client.delete_training_pipeline( - name="name_value", - ) + client.delete_training_pipeline(name="name_value",) # Establish that the underlying call was made with the expected # request object values. 
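A recurring assertion in these hunks is that passing both a request object and flattened keyword fields raises ValueError. The guard in the generated clients boils down to a check like the following sketch (get_thing and GetThingRequest are illustrative names chosen here, not the real API surface):

class GetThingRequest:
    """Hypothetical request message with a single flattened field."""

    def __init__(self, name=""):
        self.name = name


def get_thing(request=None, *, name=None):
    # Mirrors the generated-client convention: flattened arguments are a
    # convenience for building the request, so mixing them with an
    # explicit request object is ambiguous and rejected up front.
    if request is not None and name is not None:
        raise ValueError(
            "If the `request` argument is set, then none of "
            "the individual field arguments should be set."
        )
    if request is None:
        request = GetThingRequest(name=name or "")
    return request


# Flattened-only and request-only calls are both fine...
assert get_thing(name="name_value").name == "name_value"
assert get_thing(GetThingRequest(name="n")).name == "n"

# ...but combining them fails, which is what the *_flattened_error
# tests above pin down with pytest.raises(ValueError).
try:
    get_thing(GetThingRequest(), name="name_value")
except ValueError:
    pass
else:
    raise AssertionError("expected ValueError")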
@@ -1583,24 +1471,19 @@ def test_delete_training_pipeline_flattened(): def test_delete_training_pipeline_flattened_error(): - client = PipelineServiceClient( - credentials=credentials.AnonymousCredentials(), - ) + client = PipelineServiceClient(credentials=credentials.AnonymousCredentials(),) # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): client.delete_training_pipeline( - pipeline_service.DeleteTrainingPipelineRequest(), - name="name_value", + pipeline_service.DeleteTrainingPipelineRequest(), name="name_value", ) @pytest.mark.asyncio async def test_delete_training_pipeline_flattened_async(): - client = PipelineServiceAsyncClient( - credentials=credentials.AnonymousCredentials(), - ) + client = PipelineServiceAsyncClient(credentials=credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -1614,9 +1497,7 @@ async def test_delete_training_pipeline_flattened_async(): ) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - response = await client.delete_training_pipeline( - name="name_value", - ) + response = await client.delete_training_pipeline(name="name_value",) # Establish that the underlying call was made with the expected # request object values. @@ -1628,16 +1509,13 @@ async def test_delete_training_pipeline_flattened_async(): @pytest.mark.asyncio async def test_delete_training_pipeline_flattened_error_async(): - client = PipelineServiceAsyncClient( - credentials=credentials.AnonymousCredentials(), - ) + client = PipelineServiceAsyncClient(credentials=credentials.AnonymousCredentials(),) # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): await client.delete_training_pipeline( - pipeline_service.DeleteTrainingPipelineRequest(), - name="name_value", + pipeline_service.DeleteTrainingPipelineRequest(), name="name_value", ) @@ -1645,8 +1523,7 @@ def test_cancel_training_pipeline( transport: str = "grpc", request_type=pipeline_service.CancelTrainingPipelineRequest ): client = PipelineServiceClient( - credentials=credentials.AnonymousCredentials(), - transport=transport, + credentials=credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -1682,8 +1559,7 @@ async def test_cancel_training_pipeline_async( request_type=pipeline_service.CancelTrainingPipelineRequest, ): client = PipelineServiceAsyncClient( - credentials=credentials.AnonymousCredentials(), - transport=transport, + credentials=credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -1715,9 +1591,7 @@ async def test_cancel_training_pipeline_async_from_dict(): def test_cancel_training_pipeline_field_headers(): - client = PipelineServiceClient( - credentials=credentials.AnonymousCredentials(), - ) + client = PipelineServiceClient(credentials=credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. @@ -1739,17 +1613,12 @@ def test_cancel_training_pipeline_field_headers(): # Establish that the field header was sent. 
_, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "name=name/value", - ) in kw["metadata"] + assert ("x-goog-request-params", "name=name/value",) in kw["metadata"] @pytest.mark.asyncio async def test_cancel_training_pipeline_field_headers_async(): - client = PipelineServiceAsyncClient( - credentials=credentials.AnonymousCredentials(), - ) + client = PipelineServiceAsyncClient(credentials=credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. @@ -1771,16 +1640,11 @@ async def test_cancel_training_pipeline_field_headers_async(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "name=name/value", - ) in kw["metadata"] + assert ("x-goog-request-params", "name=name/value",) in kw["metadata"] def test_cancel_training_pipeline_flattened(): - client = PipelineServiceClient( - credentials=credentials.AnonymousCredentials(), - ) + client = PipelineServiceClient(credentials=credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -1791,9 +1655,7 @@ def test_cancel_training_pipeline_flattened(): # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - client.cancel_training_pipeline( - name="name_value", - ) + client.cancel_training_pipeline(name="name_value",) # Establish that the underlying call was made with the expected # request object values. @@ -1804,24 +1666,19 @@ def test_cancel_training_pipeline_flattened(): def test_cancel_training_pipeline_flattened_error(): - client = PipelineServiceClient( - credentials=credentials.AnonymousCredentials(), - ) + client = PipelineServiceClient(credentials=credentials.AnonymousCredentials(),) # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): client.cancel_training_pipeline( - pipeline_service.CancelTrainingPipelineRequest(), - name="name_value", + pipeline_service.CancelTrainingPipelineRequest(), name="name_value", ) @pytest.mark.asyncio async def test_cancel_training_pipeline_flattened_async(): - client = PipelineServiceAsyncClient( - credentials=credentials.AnonymousCredentials(), - ) + client = PipelineServiceAsyncClient(credentials=credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -1833,9 +1690,7 @@ async def test_cancel_training_pipeline_flattened_async(): call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - response = await client.cancel_training_pipeline( - name="name_value", - ) + response = await client.cancel_training_pipeline(name="name_value",) # Establish that the underlying call was made with the expected # request object values. @@ -1847,16 +1702,13 @@ async def test_cancel_training_pipeline_flattened_async(): @pytest.mark.asyncio async def test_cancel_training_pipeline_flattened_error_async(): - client = PipelineServiceAsyncClient( - credentials=credentials.AnonymousCredentials(), - ) + client = PipelineServiceAsyncClient(credentials=credentials.AnonymousCredentials(),) # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): await client.cancel_training_pipeline( - pipeline_service.CancelTrainingPipelineRequest(), - name="name_value", + pipeline_service.CancelTrainingPipelineRequest(), name="name_value", ) @@ -1867,8 +1719,7 @@ def test_credentials_transport_error(): ) with pytest.raises(ValueError): client = PipelineServiceClient( - credentials=credentials.AnonymousCredentials(), - transport=transport, + credentials=credentials.AnonymousCredentials(), transport=transport, ) # It is an error to provide a credentials file and a transport instance. @@ -1887,8 +1738,7 @@ def test_credentials_transport_error(): ) with pytest.raises(ValueError): client = PipelineServiceClient( - client_options={"scopes": ["1", "2"]}, - transport=transport, + client_options={"scopes": ["1", "2"]}, transport=transport, ) @@ -1933,13 +1783,8 @@ def test_transport_adc(transport_class): def test_transport_grpc_default(): # A client should use the gRPC transport by default. - client = PipelineServiceClient( - credentials=credentials.AnonymousCredentials(), - ) - assert isinstance( - client.transport, - transports.PipelineServiceGrpcTransport, - ) + client = PipelineServiceClient(credentials=credentials.AnonymousCredentials(),) + assert isinstance(client.transport, transports.PipelineServiceGrpcTransport,) def test_pipeline_service_base_transport_error(): @@ -1990,8 +1835,7 @@ def test_pipeline_service_base_transport_with_credentials_file(): Transport.return_value = None load_creds.return_value = (credentials.AnonymousCredentials(), None) transport = transports.PipelineServiceTransport( - credentials_file="credentials.json", - quota_project_id="octopus", + credentials_file="credentials.json", quota_project_id="octopus", ) load_creds.assert_called_once_with( "credentials.json", @@ -2061,8 +1905,7 @@ def test_pipeline_service_grpc_transport_channel(): # Check that channel is used if provided. transport = transports.PipelineServiceGrpcTransport( - host="squid.clam.whelk", - channel=channel, + host="squid.clam.whelk", channel=channel, ) assert transport.grpc_channel == channel assert transport._host == "squid.clam.whelk:443" @@ -2074,8 +1917,7 @@ def test_pipeline_service_grpc_asyncio_transport_channel(): # Check that channel is used if provided. transport = transports.PipelineServiceGrpcAsyncIOTransport( - host="squid.clam.whelk", - channel=channel, + host="squid.clam.whelk", channel=channel, ) assert transport.grpc_channel == channel assert transport._host == "squid.clam.whelk:443" @@ -2172,16 +2014,12 @@ def test_pipeline_service_transport_channel_mtls_with_adc(transport_class): def test_pipeline_service_grpc_lro_client(): client = PipelineServiceClient( - credentials=credentials.AnonymousCredentials(), - transport="grpc", + credentials=credentials.AnonymousCredentials(), transport="grpc", ) transport = client.transport # Ensure that we have a api-core operations client. - assert isinstance( - transport.operations_client, - operations_v1.OperationsClient, - ) + assert isinstance(transport.operations_client, operations_v1.OperationsClient,) # Ensure that subsequent calls to the property send the exact same object. 
assert transport.operations_client is transport.operations_client @@ -2189,16 +2027,12 @@ def test_pipeline_service_grpc_lro_client(): def test_pipeline_service_grpc_lro_async_client(): client = PipelineServiceAsyncClient( - credentials=credentials.AnonymousCredentials(), - transport="grpc_asyncio", + credentials=credentials.AnonymousCredentials(), transport="grpc_asyncio", ) transport = client.transport # Ensure that we have a api-core operations client. - assert isinstance( - transport.operations_client, - operations_v1.OperationsAsyncClient, - ) + assert isinstance(transport.operations_client, operations_v1.OperationsAsyncClient,) # Ensure that subsequent calls to the property send the exact same object. assert transport.operations_client is transport.operations_client @@ -2210,9 +2044,7 @@ def test_endpoint_path(): endpoint = "whelk" expected = "projects/{project}/locations/{location}/endpoints/{endpoint}".format( - project=project, - location=location, - endpoint=endpoint, + project=project, location=location, endpoint=endpoint, ) actual = PipelineServiceClient.endpoint_path(project, location, endpoint) assert expected == actual @@ -2237,9 +2069,7 @@ def test_model_path(): model = "winkle" expected = "projects/{project}/locations/{location}/models/{model}".format( - project=project, - location=location, - model=model, + project=project, location=location, model=model, ) actual = PipelineServiceClient.model_path(project, location, model) assert expected == actual @@ -2264,9 +2094,7 @@ def test_training_pipeline_path(): training_pipeline = "whelk" expected = "projects/{project}/locations/{location}/trainingPipelines/{training_pipeline}".format( - project=project, - location=location, - training_pipeline=training_pipeline, + project=project, location=location, training_pipeline=training_pipeline, ) actual = PipelineServiceClient.training_pipeline_path( project, location, training_pipeline @@ -2311,9 +2139,7 @@ def test_parse_common_billing_account_path(): def test_common_folder_path(): folder = "winkle" - expected = "folders/{folder}".format( - folder=folder, - ) + expected = "folders/{folder}".format(folder=folder,) actual = PipelineServiceClient.common_folder_path(folder) assert expected == actual @@ -2332,9 +2158,7 @@ def test_parse_common_folder_path(): def test_common_organization_path(): organization = "scallop" - expected = "organizations/{organization}".format( - organization=organization, - ) + expected = "organizations/{organization}".format(organization=organization,) actual = PipelineServiceClient.common_organization_path(organization) assert expected == actual @@ -2353,9 +2177,7 @@ def test_parse_common_organization_path(): def test_common_project_path(): project = "squid" - expected = "projects/{project}".format( - project=project, - ) + expected = "projects/{project}".format(project=project,) actual = PipelineServiceClient.common_project_path(project) assert expected == actual @@ -2376,8 +2198,7 @@ def test_common_location_path(): location = "octopus" expected = "projects/{project}/locations/{location}".format( - project=project, - location=location, + project=project, location=location, ) actual = PipelineServiceClient.common_location_path(project, location) assert expected == actual @@ -2402,8 +2223,7 @@ def test_client_withDEFAULT_CLIENT_INFO(): transports.PipelineServiceTransport, "_prep_wrapped_messages" ) as prep: client = PipelineServiceClient( - credentials=credentials.AnonymousCredentials(), - client_info=client_info, + 
credentials=credentials.AnonymousCredentials(), client_info=client_info, ) prep.assert_called_once_with(client_info) @@ -2412,7 +2232,6 @@ def test_client_withDEFAULT_CLIENT_INFO(): ) as prep: transport_class = PipelineServiceClient.get_transport_class() transport = transport_class( - credentials=credentials.AnonymousCredentials(), - client_info=client_info, + credentials=credentials.AnonymousCredentials(), client_info=client_info, ) prep.assert_called_once_with(client_info) diff --git a/tests/unit/gapic/aiplatform_v1beta1/test_prediction_service.py b/tests/unit/gapic/aiplatform_v1beta1/test_prediction_service.py index 6c9f551aa2..e47e0f62c5 100644 --- a/tests/unit/gapic/aiplatform_v1beta1/test_prediction_service.py +++ b/tests/unit/gapic/aiplatform_v1beta1/test_prediction_service.py @@ -389,9 +389,7 @@ def test_prediction_service_client_client_options_scopes( client_class, transport_class, transport_name ): # Check the case scopes are provided. - options = client_options.ClientOptions( - scopes=["1", "2"], - ) + options = client_options.ClientOptions(scopes=["1", "2"],) with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None client = client_class(client_options=options) @@ -459,8 +457,7 @@ def test_predict( transport: str = "grpc", request_type=prediction_service.PredictRequest ): client = PredictionServiceClient( - credentials=credentials.AnonymousCredentials(), - transport=transport, + credentials=credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -498,8 +495,7 @@ async def test_predict_async( transport: str = "grpc_asyncio", request_type=prediction_service.PredictRequest ): client = PredictionServiceAsyncClient( - credentials=credentials.AnonymousCredentials(), - transport=transport, + credentials=credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -535,9 +531,7 @@ async def test_predict_async_from_dict(): def test_predict_field_headers(): - client = PredictionServiceClient( - credentials=credentials.AnonymousCredentials(), - ) + client = PredictionServiceClient(credentials=credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. @@ -557,10 +551,7 @@ def test_predict_field_headers(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "endpoint=endpoint/value", - ) in kw["metadata"] + assert ("x-goog-request-params", "endpoint=endpoint/value",) in kw["metadata"] @pytest.mark.asyncio @@ -589,16 +580,11 @@ async def test_predict_field_headers_async(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "endpoint=endpoint/value", - ) in kw["metadata"] + assert ("x-goog-request-params", "endpoint=endpoint/value",) in kw["metadata"] def test_predict_flattened(): - client = PredictionServiceClient( - credentials=credentials.AnonymousCredentials(), - ) + client = PredictionServiceClient(credentials=credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object(type(client.transport.predict), "__call__") as call: @@ -631,9 +617,7 @@ def test_predict_flattened(): def test_predict_flattened_error(): - client = PredictionServiceClient( - credentials=credentials.AnonymousCredentials(), - ) + client = PredictionServiceClient(credentials=credentials.AnonymousCredentials(),) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -706,8 +690,7 @@ def test_explain( transport: str = "grpc", request_type=prediction_service.ExplainRequest ): client = PredictionServiceClient( - credentials=credentials.AnonymousCredentials(), - transport=transport, + credentials=credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -745,8 +728,7 @@ async def test_explain_async( transport: str = "grpc_asyncio", request_type=prediction_service.ExplainRequest ): client = PredictionServiceAsyncClient( - credentials=credentials.AnonymousCredentials(), - transport=transport, + credentials=credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -782,9 +764,7 @@ async def test_explain_async_from_dict(): def test_explain_field_headers(): - client = PredictionServiceClient( - credentials=credentials.AnonymousCredentials(), - ) + client = PredictionServiceClient(credentials=credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. @@ -804,10 +784,7 @@ def test_explain_field_headers(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "endpoint=endpoint/value", - ) in kw["metadata"] + assert ("x-goog-request-params", "endpoint=endpoint/value",) in kw["metadata"] @pytest.mark.asyncio @@ -836,16 +813,11 @@ async def test_explain_field_headers_async(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "endpoint=endpoint/value", - ) in kw["metadata"] + assert ("x-goog-request-params", "endpoint=endpoint/value",) in kw["metadata"] def test_explain_flattened(): - client = PredictionServiceClient( - credentials=credentials.AnonymousCredentials(), - ) + client = PredictionServiceClient(credentials=credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.explain), "__call__") as call: @@ -881,9 +853,7 @@ def test_explain_flattened(): def test_explain_flattened_error(): - client = PredictionServiceClient( - credentials=credentials.AnonymousCredentials(), - ) + client = PredictionServiceClient(credentials=credentials.AnonymousCredentials(),) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -964,8 +934,7 @@ def test_credentials_transport_error(): ) with pytest.raises(ValueError): client = PredictionServiceClient( - credentials=credentials.AnonymousCredentials(), - transport=transport, + credentials=credentials.AnonymousCredentials(), transport=transport, ) # It is an error to provide a credentials file and a transport instance. 
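Every hunk in these files makes the same mechanical change: a call that was previously split one-argument-per-line is collapsed onto a single line, trailing comma intact, because it fits within the 88-column default. This matches a black release from before the magic trailing comma (which, to the best of my knowledge, arrived in black 20.8b0 and forces such calls to stay exploded). A quick way to convince yourself the reformat is behavior-preserving is to compare the ASTs of the two spellings:

import ast

exploded = """
client = ModelServiceClient(
    credentials=credentials.AnonymousCredentials(),
    transport=transport,
)
"""

collapsed = """
client = ModelServiceClient(
    credentials=credentials.AnonymousCredentials(), transport=transport,
)
"""

# Pure whitespace changes: both spellings parse to the identical AST,
# so the reformatted tests exercise exactly the same code paths.
assert ast.dump(ast.parse(exploded)) == ast.dump(ast.parse(collapsed))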
@@ -984,8 +953,7 @@ def test_credentials_transport_error(): ) with pytest.raises(ValueError): client = PredictionServiceClient( - client_options={"scopes": ["1", "2"]}, - transport=transport, + client_options={"scopes": ["1", "2"]}, transport=transport, ) @@ -1030,13 +998,8 @@ def test_transport_adc(transport_class): def test_transport_grpc_default(): # A client should use the gRPC transport by default. - client = PredictionServiceClient( - credentials=credentials.AnonymousCredentials(), - ) - assert isinstance( - client.transport, - transports.PredictionServiceGrpcTransport, - ) + client = PredictionServiceClient(credentials=credentials.AnonymousCredentials(),) + assert isinstance(client.transport, transports.PredictionServiceGrpcTransport,) def test_prediction_service_base_transport_error(): @@ -1079,8 +1042,7 @@ def test_prediction_service_base_transport_with_credentials_file(): Transport.return_value = None load_creds.return_value = (credentials.AnonymousCredentials(), None) transport = transports.PredictionServiceTransport( - credentials_file="credentials.json", - quota_project_id="octopus", + credentials_file="credentials.json", quota_project_id="octopus", ) load_creds.assert_called_once_with( "credentials.json", @@ -1150,8 +1112,7 @@ def test_prediction_service_grpc_transport_channel(): # Check that channel is used if provided. transport = transports.PredictionServiceGrpcTransport( - host="squid.clam.whelk", - channel=channel, + host="squid.clam.whelk", channel=channel, ) assert transport.grpc_channel == channel assert transport._host == "squid.clam.whelk:443" @@ -1163,8 +1124,7 @@ def test_prediction_service_grpc_asyncio_transport_channel(): # Check that channel is used if provided. transport = transports.PredictionServiceGrpcAsyncIOTransport( - host="squid.clam.whelk", - channel=channel, + host="squid.clam.whelk", channel=channel, ) assert transport.grpc_channel == channel assert transport._host == "squid.clam.whelk:443" @@ -1265,9 +1225,7 @@ def test_endpoint_path(): endpoint = "whelk" expected = "projects/{project}/locations/{location}/endpoints/{endpoint}".format( - project=project, - location=location, - endpoint=endpoint, + project=project, location=location, endpoint=endpoint, ) actual = PredictionServiceClient.endpoint_path(project, location, endpoint) assert expected == actual @@ -1310,9 +1268,7 @@ def test_parse_common_billing_account_path(): def test_common_folder_path(): folder = "winkle" - expected = "folders/{folder}".format( - folder=folder, - ) + expected = "folders/{folder}".format(folder=folder,) actual = PredictionServiceClient.common_folder_path(folder) assert expected == actual @@ -1331,9 +1287,7 @@ def test_parse_common_folder_path(): def test_common_organization_path(): organization = "scallop" - expected = "organizations/{organization}".format( - organization=organization, - ) + expected = "organizations/{organization}".format(organization=organization,) actual = PredictionServiceClient.common_organization_path(organization) assert expected == actual @@ -1352,9 +1306,7 @@ def test_parse_common_organization_path(): def test_common_project_path(): project = "squid" - expected = "projects/{project}".format( - project=project, - ) + expected = "projects/{project}".format(project=project,) actual = PredictionServiceClient.common_project_path(project) assert expected == actual @@ -1375,8 +1327,7 @@ def test_common_location_path(): location = "octopus" expected = "projects/{project}/locations/{location}".format( - project=project, - location=location, + 
project=project, location=location, ) actual = PredictionServiceClient.common_location_path(project, location) assert expected == actual @@ -1401,8 +1352,7 @@ def test_client_withDEFAULT_CLIENT_INFO(): transports.PredictionServiceTransport, "_prep_wrapped_messages" ) as prep: client = PredictionServiceClient( - credentials=credentials.AnonymousCredentials(), - client_info=client_info, + credentials=credentials.AnonymousCredentials(), client_info=client_info, ) prep.assert_called_once_with(client_info) @@ -1411,7 +1361,6 @@ def test_client_withDEFAULT_CLIENT_INFO(): ) as prep: transport_class = PredictionServiceClient.get_transport_class() transport = transport_class( - credentials=credentials.AnonymousCredentials(), - client_info=client_info, + credentials=credentials.AnonymousCredentials(), client_info=client_info, ) prep.assert_called_once_with(client_info) diff --git a/tests/unit/gapic/aiplatform_v1beta1/test_specialist_pool_service.py b/tests/unit/gapic/aiplatform_v1beta1/test_specialist_pool_service.py index e08177ca7e..6c1061d588 100644 --- a/tests/unit/gapic/aiplatform_v1beta1/test_specialist_pool_service.py +++ b/tests/unit/gapic/aiplatform_v1beta1/test_specialist_pool_service.py @@ -404,9 +404,7 @@ def test_specialist_pool_service_client_client_options_scopes( client_class, transport_class, transport_name ): # Check the case scopes are provided. - options = client_options.ClientOptions( - scopes=["1", "2"], - ) + options = client_options.ClientOptions(scopes=["1", "2"],) with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None client = client_class(client_options=options) @@ -479,8 +477,7 @@ def test_create_specialist_pool( request_type=specialist_pool_service.CreateSpecialistPoolRequest, ): client = SpecialistPoolServiceClient( - credentials=credentials.AnonymousCredentials(), - transport=transport, + credentials=credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -516,8 +513,7 @@ async def test_create_specialist_pool_async( request_type=specialist_pool_service.CreateSpecialistPoolRequest, ): client = SpecialistPoolServiceAsyncClient( - credentials=credentials.AnonymousCredentials(), - transport=transport, + credentials=credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -575,10 +571,7 @@ def test_create_specialist_pool_field_headers(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "parent=parent/value", - ) in kw["metadata"] + assert ("x-goog-request-params", "parent=parent/value",) in kw["metadata"] @pytest.mark.asyncio @@ -609,10 +602,7 @@ async def test_create_specialist_pool_field_headers_async(): # Establish that the field header was sent. 
_, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "parent=parent/value", - ) in kw["metadata"] + assert ("x-goog-request-params", "parent=parent/value",) in kw["metadata"] def test_create_specialist_pool_flattened(): @@ -717,8 +707,7 @@ def test_get_specialist_pool( request_type=specialist_pool_service.GetSpecialistPoolRequest, ): client = SpecialistPoolServiceClient( - credentials=credentials.AnonymousCredentials(), - transport=transport, + credentials=credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -771,8 +760,7 @@ async def test_get_specialist_pool_async( request_type=specialist_pool_service.GetSpecialistPoolRequest, ): client = SpecialistPoolServiceAsyncClient( - credentials=credentials.AnonymousCredentials(), - transport=transport, + credentials=credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -846,10 +834,7 @@ def test_get_specialist_pool_field_headers(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "name=name/value", - ) in kw["metadata"] + assert ("x-goog-request-params", "name=name/value",) in kw["metadata"] @pytest.mark.asyncio @@ -880,10 +865,7 @@ async def test_get_specialist_pool_field_headers_async(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "name=name/value", - ) in kw["metadata"] + assert ("x-goog-request-params", "name=name/value",) in kw["metadata"] def test_get_specialist_pool_flattened(): @@ -900,9 +882,7 @@ def test_get_specialist_pool_flattened(): # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - client.get_specialist_pool( - name="name_value", - ) + client.get_specialist_pool(name="name_value",) # Establish that the underlying call was made with the expected # request object values. @@ -921,8 +901,7 @@ def test_get_specialist_pool_flattened_error(): # fields is an error. with pytest.raises(ValueError): client.get_specialist_pool( - specialist_pool_service.GetSpecialistPoolRequest(), - name="name_value", + specialist_pool_service.GetSpecialistPoolRequest(), name="name_value", ) @@ -944,9 +923,7 @@ async def test_get_specialist_pool_flattened_async(): ) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - response = await client.get_specialist_pool( - name="name_value", - ) + response = await client.get_specialist_pool(name="name_value",) # Establish that the underlying call was made with the expected # request object values. @@ -966,8 +943,7 @@ async def test_get_specialist_pool_flattened_error_async(): # fields is an error. 
with pytest.raises(ValueError): await client.get_specialist_pool( - specialist_pool_service.GetSpecialistPoolRequest(), - name="name_value", + specialist_pool_service.GetSpecialistPoolRequest(), name="name_value", ) @@ -976,8 +952,7 @@ def test_list_specialist_pools( request_type=specialist_pool_service.ListSpecialistPoolsRequest, ): client = SpecialistPoolServiceClient( - credentials=credentials.AnonymousCredentials(), - transport=transport, + credentials=credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -1018,8 +993,7 @@ async def test_list_specialist_pools_async( request_type=specialist_pool_service.ListSpecialistPoolsRequest, ): client = SpecialistPoolServiceAsyncClient( - credentials=credentials.AnonymousCredentials(), - transport=transport, + credentials=credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -1081,10 +1055,7 @@ def test_list_specialist_pools_field_headers(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "parent=parent/value", - ) in kw["metadata"] + assert ("x-goog-request-params", "parent=parent/value",) in kw["metadata"] @pytest.mark.asyncio @@ -1115,10 +1086,7 @@ async def test_list_specialist_pools_field_headers_async(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "parent=parent/value", - ) in kw["metadata"] + assert ("x-goog-request-params", "parent=parent/value",) in kw["metadata"] def test_list_specialist_pools_flattened(): @@ -1135,9 +1103,7 @@ def test_list_specialist_pools_flattened(): # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - client.list_specialist_pools( - parent="parent_value", - ) + client.list_specialist_pools(parent="parent_value",) # Establish that the underlying call was made with the expected # request object values. @@ -1156,8 +1122,7 @@ def test_list_specialist_pools_flattened_error(): # fields is an error. with pytest.raises(ValueError): client.list_specialist_pools( - specialist_pool_service.ListSpecialistPoolsRequest(), - parent="parent_value", + specialist_pool_service.ListSpecialistPoolsRequest(), parent="parent_value", ) @@ -1179,9 +1144,7 @@ async def test_list_specialist_pools_flattened_async(): ) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - response = await client.list_specialist_pools( - parent="parent_value", - ) + response = await client.list_specialist_pools(parent="parent_value",) # Establish that the underlying call was made with the expected # request object values. @@ -1201,15 +1164,12 @@ async def test_list_specialist_pools_flattened_error_async(): # fields is an error. with pytest.raises(ValueError): await client.list_specialist_pools( - specialist_pool_service.ListSpecialistPoolsRequest(), - parent="parent_value", + specialist_pool_service.ListSpecialistPoolsRequest(), parent="parent_value", ) def test_list_specialist_pools_pager(): - client = SpecialistPoolServiceClient( - credentials=credentials.AnonymousCredentials, - ) + client = SpecialistPoolServiceClient(credentials=credentials.AnonymousCredentials,) # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object( @@ -1226,13 +1186,10 @@ def test_list_specialist_pools_pager(): next_page_token="abc", ), specialist_pool_service.ListSpecialistPoolsResponse( - specialist_pools=[], - next_page_token="def", + specialist_pools=[], next_page_token="def", ), specialist_pool_service.ListSpecialistPoolsResponse( - specialist_pools=[ - specialist_pool.SpecialistPool(), - ], + specialist_pools=[specialist_pool.SpecialistPool(),], next_page_token="ghi", ), specialist_pool_service.ListSpecialistPoolsResponse( @@ -1258,9 +1215,7 @@ def test_list_specialist_pools_pager(): def test_list_specialist_pools_pages(): - client = SpecialistPoolServiceClient( - credentials=credentials.AnonymousCredentials, - ) + client = SpecialistPoolServiceClient(credentials=credentials.AnonymousCredentials,) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -1277,13 +1232,10 @@ def test_list_specialist_pools_pages(): next_page_token="abc", ), specialist_pool_service.ListSpecialistPoolsResponse( - specialist_pools=[], - next_page_token="def", + specialist_pools=[], next_page_token="def", ), specialist_pool_service.ListSpecialistPoolsResponse( - specialist_pools=[ - specialist_pool.SpecialistPool(), - ], + specialist_pools=[specialist_pool.SpecialistPool(),], next_page_token="ghi", ), specialist_pool_service.ListSpecialistPoolsResponse( @@ -1322,13 +1274,10 @@ async def test_list_specialist_pools_async_pager(): next_page_token="abc", ), specialist_pool_service.ListSpecialistPoolsResponse( - specialist_pools=[], - next_page_token="def", + specialist_pools=[], next_page_token="def", ), specialist_pool_service.ListSpecialistPoolsResponse( - specialist_pools=[ - specialist_pool.SpecialistPool(), - ], + specialist_pools=[specialist_pool.SpecialistPool(),], next_page_token="ghi", ), specialist_pool_service.ListSpecialistPoolsResponse( @@ -1339,9 +1288,7 @@ async def test_list_specialist_pools_async_pager(): ), RuntimeError, ) - async_pager = await client.list_specialist_pools( - request={}, - ) + async_pager = await client.list_specialist_pools(request={},) assert async_pager.next_page_token == "abc" responses = [] async for response in async_pager: @@ -1374,13 +1321,10 @@ async def test_list_specialist_pools_async_pages(): next_page_token="abc", ), specialist_pool_service.ListSpecialistPoolsResponse( - specialist_pools=[], - next_page_token="def", + specialist_pools=[], next_page_token="def", ), specialist_pool_service.ListSpecialistPoolsResponse( - specialist_pools=[ - specialist_pool.SpecialistPool(), - ], + specialist_pools=[specialist_pool.SpecialistPool(),], next_page_token="ghi", ), specialist_pool_service.ListSpecialistPoolsResponse( @@ -1403,8 +1347,7 @@ def test_delete_specialist_pool( request_type=specialist_pool_service.DeleteSpecialistPoolRequest, ): client = SpecialistPoolServiceClient( - credentials=credentials.AnonymousCredentials(), - transport=transport, + credentials=credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -1440,8 +1383,7 @@ async def test_delete_specialist_pool_async( request_type=specialist_pool_service.DeleteSpecialistPoolRequest, ): client = SpecialistPoolServiceAsyncClient( - credentials=credentials.AnonymousCredentials(), - transport=transport, + credentials=credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -1499,10 +1441,7 @@ def test_delete_specialist_pool_field_headers(): # 
Establish that the field header was sent. _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "name=name/value", - ) in kw["metadata"] + assert ("x-goog-request-params", "name=name/value",) in kw["metadata"] @pytest.mark.asyncio @@ -1533,10 +1472,7 @@ async def test_delete_specialist_pool_field_headers_async(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "name=name/value", - ) in kw["metadata"] + assert ("x-goog-request-params", "name=name/value",) in kw["metadata"] def test_delete_specialist_pool_flattened(): @@ -1553,9 +1489,7 @@ def test_delete_specialist_pool_flattened(): # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - client.delete_specialist_pool( - name="name_value", - ) + client.delete_specialist_pool(name="name_value",) # Establish that the underlying call was made with the expected # request object values. @@ -1574,8 +1508,7 @@ def test_delete_specialist_pool_flattened_error(): # fields is an error. with pytest.raises(ValueError): client.delete_specialist_pool( - specialist_pool_service.DeleteSpecialistPoolRequest(), - name="name_value", + specialist_pool_service.DeleteSpecialistPoolRequest(), name="name_value", ) @@ -1597,9 +1530,7 @@ async def test_delete_specialist_pool_flattened_async(): ) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - response = await client.delete_specialist_pool( - name="name_value", - ) + response = await client.delete_specialist_pool(name="name_value",) # Establish that the underlying call was made with the expected # request object values. @@ -1619,8 +1550,7 @@ async def test_delete_specialist_pool_flattened_error_async(): # fields is an error. with pytest.raises(ValueError): await client.delete_specialist_pool( - specialist_pool_service.DeleteSpecialistPoolRequest(), - name="name_value", + specialist_pool_service.DeleteSpecialistPoolRequest(), name="name_value", ) @@ -1629,8 +1559,7 @@ def test_update_specialist_pool( request_type=specialist_pool_service.UpdateSpecialistPoolRequest, ): client = SpecialistPoolServiceClient( - credentials=credentials.AnonymousCredentials(), - transport=transport, + credentials=credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -1666,8 +1595,7 @@ async def test_update_specialist_pool_async( request_type=specialist_pool_service.UpdateSpecialistPoolRequest, ): client = SpecialistPoolServiceAsyncClient( - credentials=credentials.AnonymousCredentials(), - transport=transport, + credentials=credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -1869,8 +1797,7 @@ def test_credentials_transport_error(): ) with pytest.raises(ValueError): client = SpecialistPoolServiceClient( - credentials=credentials.AnonymousCredentials(), - transport=transport, + credentials=credentials.AnonymousCredentials(), transport=transport, ) # It is an error to provide a credentials file and a transport instance. 
@@ -1889,8 +1816,7 @@ def test_credentials_transport_error(): ) with pytest.raises(ValueError): client = SpecialistPoolServiceClient( - client_options={"scopes": ["1", "2"]}, - transport=transport, + client_options={"scopes": ["1", "2"]}, transport=transport, ) @@ -1938,10 +1864,7 @@ def test_transport_grpc_default(): client = SpecialistPoolServiceClient( credentials=credentials.AnonymousCredentials(), ) - assert isinstance( - client.transport, - transports.SpecialistPoolServiceGrpcTransport, - ) + assert isinstance(client.transport, transports.SpecialistPoolServiceGrpcTransport,) def test_specialist_pool_service_base_transport_error(): @@ -1992,8 +1915,7 @@ def test_specialist_pool_service_base_transport_with_credentials_file(): Transport.return_value = None load_creds.return_value = (credentials.AnonymousCredentials(), None) transport = transports.SpecialistPoolServiceTransport( - credentials_file="credentials.json", - quota_project_id="octopus", + credentials_file="credentials.json", quota_project_id="octopus", ) load_creds.assert_called_once_with( "credentials.json", @@ -2063,8 +1985,7 @@ def test_specialist_pool_service_grpc_transport_channel(): # Check that channel is used if provided. transport = transports.SpecialistPoolServiceGrpcTransport( - host="squid.clam.whelk", - channel=channel, + host="squid.clam.whelk", channel=channel, ) assert transport.grpc_channel == channel assert transport._host == "squid.clam.whelk:443" @@ -2076,8 +1997,7 @@ def test_specialist_pool_service_grpc_asyncio_transport_channel(): # Check that channel is used if provided. transport = transports.SpecialistPoolServiceGrpcAsyncIOTransport( - host="squid.clam.whelk", - channel=channel, + host="squid.clam.whelk", channel=channel, ) assert transport.grpc_channel == channel assert transport._host == "squid.clam.whelk:443" @@ -2174,16 +2094,12 @@ def test_specialist_pool_service_transport_channel_mtls_with_adc(transport_class def test_specialist_pool_service_grpc_lro_client(): client = SpecialistPoolServiceClient( - credentials=credentials.AnonymousCredentials(), - transport="grpc", + credentials=credentials.AnonymousCredentials(), transport="grpc", ) transport = client.transport # Ensure that we have a api-core operations client. - assert isinstance( - transport.operations_client, - operations_v1.OperationsClient, - ) + assert isinstance(transport.operations_client, operations_v1.OperationsClient,) # Ensure that subsequent calls to the property send the exact same object. assert transport.operations_client is transport.operations_client @@ -2191,16 +2107,12 @@ def test_specialist_pool_service_grpc_lro_client(): def test_specialist_pool_service_grpc_lro_async_client(): client = SpecialistPoolServiceAsyncClient( - credentials=credentials.AnonymousCredentials(), - transport="grpc_asyncio", + credentials=credentials.AnonymousCredentials(), transport="grpc_asyncio", ) transport = client.transport # Ensure that we have a api-core operations client. - assert isinstance( - transport.operations_client, - operations_v1.OperationsAsyncClient, - ) + assert isinstance(transport.operations_client, operations_v1.OperationsAsyncClient,) # Ensure that subsequent calls to the property send the exact same object. 
assert transport.operations_client is transport.operations_client @@ -2212,9 +2124,7 @@ def test_specialist_pool_path(): specialist_pool = "whelk" expected = "projects/{project}/locations/{location}/specialistPools/{specialist_pool}".format( - project=project, - location=location, - specialist_pool=specialist_pool, + project=project, location=location, specialist_pool=specialist_pool, ) actual = SpecialistPoolServiceClient.specialist_pool_path( project, location, specialist_pool @@ -2259,9 +2169,7 @@ def test_parse_common_billing_account_path(): def test_common_folder_path(): folder = "winkle" - expected = "folders/{folder}".format( - folder=folder, - ) + expected = "folders/{folder}".format(folder=folder,) actual = SpecialistPoolServiceClient.common_folder_path(folder) assert expected == actual @@ -2280,9 +2188,7 @@ def test_parse_common_folder_path(): def test_common_organization_path(): organization = "scallop" - expected = "organizations/{organization}".format( - organization=organization, - ) + expected = "organizations/{organization}".format(organization=organization,) actual = SpecialistPoolServiceClient.common_organization_path(organization) assert expected == actual @@ -2301,9 +2207,7 @@ def test_parse_common_organization_path(): def test_common_project_path(): project = "squid" - expected = "projects/{project}".format( - project=project, - ) + expected = "projects/{project}".format(project=project,) actual = SpecialistPoolServiceClient.common_project_path(project) assert expected == actual @@ -2324,8 +2228,7 @@ def test_common_location_path(): location = "octopus" expected = "projects/{project}/locations/{location}".format( - project=project, - location=location, + project=project, location=location, ) actual = SpecialistPoolServiceClient.common_location_path(project, location) assert expected == actual @@ -2350,8 +2253,7 @@ def test_client_withDEFAULT_CLIENT_INFO(): transports.SpecialistPoolServiceTransport, "_prep_wrapped_messages" ) as prep: client = SpecialistPoolServiceClient( - credentials=credentials.AnonymousCredentials(), - client_info=client_info, + credentials=credentials.AnonymousCredentials(), client_info=client_info, ) prep.assert_called_once_with(client_info) @@ -2360,7 +2262,6 @@ def test_client_withDEFAULT_CLIENT_INFO(): ) as prep: transport_class = SpecialistPoolServiceClient.get_transport_class() transport = transport_class( - credentials=credentials.AnonymousCredentials(), - client_info=client_info, + credentials=credentials.AnonymousCredentials(), client_info=client_info, ) prep.assert_called_once_with(client_info) From 385589ad910391480232ebe7a634a68420d3c754 Mon Sep 17 00:00:00 2001 From: Eric Schmidt Date: Tue, 15 Dec 2020 16:22:39 -0800 Subject: [PATCH 5/5] fix: now with more nox! 
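
Re-runs the repository's formatting session so the generated library and
test files match the black release pinned in noxfile.py, rather than the
hand-run black from PATCH 1/5. The pinned release collapses the short
trailing-comma calls that the earlier patch had expanded, which is why
every hunk below reverses its counterpart from that patch.

The noxfile.py hunk itself is not shown in this excerpt, so the following
is only a sketch of the blacken session these repositories typically use;
the session name, version pin, and paths are assumptions, not the literal
contents of this commit:

    # Sketch only: BLACK_VERSION and BLACK_PATHS are assumed, not taken
    # from this commit's noxfile.py change.
    import nox

    BLACK_VERSION = "black==19.10b0"  # assumed pin; this release collapses
                                      # short calls like f(x,) onto one line
    BLACK_PATHS = ["docs", "google", "tests", "noxfile.py", "setup.py"]

    @nox.session(python="3.8")
    def blacken(session):
        """Format library and test files with the pinned black release."""
        session.install(BLACK_VERSION)
        session.run("black", *BLACK_PATHS)

Running `nox -s blacken` before committing keeps the generated code stable
across contributors, since everyone formats with the same pinned release
instead of whatever black happens to be installed locally.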
--- docs/conf.py | 5 +- .../types/image_classification.py | 4 +- .../types/image_object_detection.py | 4 +- .../types/image_segmentation.py | 4 +- .../types/text_classification.py | 4 +- .../instance_v1beta1/types/text_extraction.py | 4 +- .../instance_v1beta1/types/text_sentiment.py | 4 +- .../types/video_action_recognition.py | 4 +- .../types/video_classification.py | 4 +- .../types/video_object_tracking.py | 4 +- .../types/image_classification.py | 4 +- .../types/image_object_detection.py | 4 +- .../types/image_segmentation.py | 4 +- .../types/video_action_recognition.py | 4 +- .../types/video_classification.py | 4 +- .../types/video_object_tracking.py | 4 +- .../types/classification.py | 4 +- .../types/image_object_detection.py | 10 +- .../types/image_segmentation.py | 4 +- .../types/tabular_classification.py | 4 +- .../types/tabular_regression.py | 4 +- .../types/text_extraction.py | 4 +- .../types/text_sentiment.py | 10 +- .../types/time_series_forecasting.py | 4 +- .../types/video_action_recognition.py | 20 +- .../types/video_classification.py | 20 +- .../types/video_object_tracking.py | 64 +- .../types/automl_forecasting.py | 20 +- .../types/automl_image_classification.py | 18 +- .../types/automl_image_object_detection.py | 18 +- .../types/automl_image_segmentation.py | 18 +- .../definition_v1beta1/types/automl_tables.py | 22 +- .../types/automl_text_classification.py | 9 +- .../types/automl_text_extraction.py | 11 +- .../types/automl_text_sentiment.py | 11 +- .../types/automl_video_action_recognition.py | 15 +- .../types/automl_video_classification.py | 15 +- .../types/automl_video_object_tracking.py | 15 +- .../export_evaluated_data_items_config.py | 4 +- .../services/dataset_service/async_client.py | 85 +- .../services/dataset_service/client.py | 157 +- .../dataset_service/transports/base.py | 40 +- .../services/endpoint_service/async_client.py | 54 +- .../services/endpoint_service/client.py | 115 +- .../endpoint_service/transports/base.py | 28 +- .../services/job_service/async_client.py | 152 +- .../services/job_service/client.py | 239 +-- .../services/job_service/transports/base.py | 20 +- .../migration_service/async_client.py | 19 +- .../services/migration_service/client.py | 126 +- .../services/model_service/async_client.py | 85 +- .../services/model_service/client.py | 170 +-- .../services/model_service/transports/base.py | 28 +- .../services/pipeline_service/async_client.py | 38 +- .../services/pipeline_service/client.py | 107 +- .../prediction_service/async_client.py | 14 +- .../services/prediction_service/client.py | 63 +- .../prediction_service/transports/base.py | 8 +- .../specialist_pool_service/async_client.py | 40 +- .../specialist_pool_service/client.py | 89 +- .../transports/base.py | 4 +- .../types/accelerator_type.py | 5 +- .../aiplatform_v1beta1/types/annotation.py | 27 +- .../types/annotation_spec.py | 19 +- .../types/batch_prediction_job.py | 100 +- .../types/completion_stats.py | 5 +- .../aiplatform_v1beta1/types/custom_job.py | 80 +- .../aiplatform_v1beta1/types/data_item.py | 23 +- .../types/data_labeling_job.py | 58 +- .../cloud/aiplatform_v1beta1/types/dataset.py | 34 +- .../types/dataset_service.py | 80 +- .../types/deployed_model_ref.py | 5 +- .../aiplatform_v1beta1/types/endpoint.py | 32 +- .../types/endpoint_service.py | 48 +- .../cloud/aiplatform_v1beta1/types/env_var.py | 5 +- .../aiplatform_v1beta1/types/explanation.py | 54 +- .../types/explanation_metadata.py | 32 +- .../types/hyperparameter_tuning_job.py | 57 +- 
.../aiplatform_v1beta1/types/job_service.py | 50 +- .../aiplatform_v1beta1/types/job_state.py | 5 +- .../types/machine_resources.py | 16 +- .../types/manual_batch_tuning_parameters.py | 4 +- .../types/migratable_resource.py | 33 +- .../types/migration_service.py | 30 +- .../cloud/aiplatform_v1beta1/types/model.py | 69 +- .../types/model_evaluation.py | 21 +- .../types/model_evaluation_slice.py | 25 +- .../aiplatform_v1beta1/types/model_service.py | 78 +- .../aiplatform_v1beta1/types/operation.py | 25 +- .../types/pipeline_service.py | 14 +- .../types/pipeline_state.py | 5 +- .../types/prediction_service.py | 40 +- .../types/specialist_pool.py | 5 +- .../types/specialist_pool_service.py | 32 +- .../cloud/aiplatform_v1beta1/types/study.py | 88 +- .../types/training_pipeline.py | 90 +- .../types/user_action_reference.py | 5 +- noxfile.py | 15 +- .../test_dataset_service.py | 824 +++------- .../test_endpoint_service.py | 477 ++---- .../aiplatform_v1beta1/test_job_service.py | 1323 +++++------------ .../test_migration_service.py | 210 +-- .../aiplatform_v1beta1/test_model_service.py | 788 +++------- .../test_pipeline_service.py | 381 ++--- .../test_prediction_service.py | 109 +- .../test_specialist_pool_service.py | 217 +-- 106 files changed, 1770 insertions(+), 5943 deletions(-) diff --git a/docs/conf.py b/docs/conf.py index a6e4da0270..98e68be241 100644 --- a/docs/conf.py +++ b/docs/conf.py @@ -347,10 +347,7 @@ intersphinx_mapping = { "python": ("https://python.readthedocs.org/en/latest/", None), "google-auth": ("https://googleapis.dev/python/google-auth/latest/", None), - "google.api_core": ( - "https://googleapis.dev/python/google-api-core/latest/", - None, - ), + "google.api_core": ("https://googleapis.dev/python/google-api-core/latest/", None,), "grpc": ("https://grpc.github.io/grpc/python/", None), "proto-plus": ("https://proto-plus-python.readthedocs.io/en/latest/", None), } diff --git a/google/cloud/aiplatform/v1beta1/schema/predict/instance_v1beta1/types/image_classification.py b/google/cloud/aiplatform/v1beta1/schema/predict/instance_v1beta1/types/image_classification.py index c484150e69..84b1ef0bbe 100644 --- a/google/cloud/aiplatform/v1beta1/schema/predict/instance_v1beta1/types/image_classification.py +++ b/google/cloud/aiplatform/v1beta1/schema/predict/instance_v1beta1/types/image_classification.py @@ -20,9 +20,7 @@ __protobuf__ = proto.module( package="google.cloud.aiplatform.v1beta1.schema.predict.instance", - manifest={ - "ImageClassificationPredictionInstance", - }, + manifest={"ImageClassificationPredictionInstance",}, ) diff --git a/google/cloud/aiplatform/v1beta1/schema/predict/instance_v1beta1/types/image_object_detection.py b/google/cloud/aiplatform/v1beta1/schema/predict/instance_v1beta1/types/image_object_detection.py index 8455fa581c..79c3efc2c6 100644 --- a/google/cloud/aiplatform/v1beta1/schema/predict/instance_v1beta1/types/image_object_detection.py +++ b/google/cloud/aiplatform/v1beta1/schema/predict/instance_v1beta1/types/image_object_detection.py @@ -20,9 +20,7 @@ __protobuf__ = proto.module( package="google.cloud.aiplatform.v1beta1.schema.predict.instance", - manifest={ - "ImageObjectDetectionPredictionInstance", - }, + manifest={"ImageObjectDetectionPredictionInstance",}, ) diff --git a/google/cloud/aiplatform/v1beta1/schema/predict/instance_v1beta1/types/image_segmentation.py b/google/cloud/aiplatform/v1beta1/schema/predict/instance_v1beta1/types/image_segmentation.py index 497b67b691..5a3232c6d2 100644 --- 
a/google/cloud/aiplatform/v1beta1/schema/predict/instance_v1beta1/types/image_segmentation.py +++ b/google/cloud/aiplatform/v1beta1/schema/predict/instance_v1beta1/types/image_segmentation.py @@ -20,9 +20,7 @@ __protobuf__ = proto.module( package="google.cloud.aiplatform.v1beta1.schema.predict.instance", - manifest={ - "ImageSegmentationPredictionInstance", - }, + manifest={"ImageSegmentationPredictionInstance",}, ) diff --git a/google/cloud/aiplatform/v1beta1/schema/predict/instance_v1beta1/types/text_classification.py b/google/cloud/aiplatform/v1beta1/schema/predict/instance_v1beta1/types/text_classification.py index 4f196ac220..a615dc7e49 100644 --- a/google/cloud/aiplatform/v1beta1/schema/predict/instance_v1beta1/types/text_classification.py +++ b/google/cloud/aiplatform/v1beta1/schema/predict/instance_v1beta1/types/text_classification.py @@ -20,9 +20,7 @@ __protobuf__ = proto.module( package="google.cloud.aiplatform.v1beta1.schema.predict.instance", - manifest={ - "TextClassificationPredictionInstance", - }, + manifest={"TextClassificationPredictionInstance",}, ) diff --git a/google/cloud/aiplatform/v1beta1/schema/predict/instance_v1beta1/types/text_extraction.py b/google/cloud/aiplatform/v1beta1/schema/predict/instance_v1beta1/types/text_extraction.py index 1077f8b8d7..c6fecf80b7 100644 --- a/google/cloud/aiplatform/v1beta1/schema/predict/instance_v1beta1/types/text_extraction.py +++ b/google/cloud/aiplatform/v1beta1/schema/predict/instance_v1beta1/types/text_extraction.py @@ -20,9 +20,7 @@ __protobuf__ = proto.module( package="google.cloud.aiplatform.v1beta1.schema.predict.instance", - manifest={ - "TextExtractionPredictionInstance", - }, + manifest={"TextExtractionPredictionInstance",}, ) diff --git a/google/cloud/aiplatform/v1beta1/schema/predict/instance_v1beta1/types/text_sentiment.py b/google/cloud/aiplatform/v1beta1/schema/predict/instance_v1beta1/types/text_sentiment.py index 00bd62fdeb..69836d0e96 100644 --- a/google/cloud/aiplatform/v1beta1/schema/predict/instance_v1beta1/types/text_sentiment.py +++ b/google/cloud/aiplatform/v1beta1/schema/predict/instance_v1beta1/types/text_sentiment.py @@ -20,9 +20,7 @@ __protobuf__ = proto.module( package="google.cloud.aiplatform.v1beta1.schema.predict.instance", - manifest={ - "TextSentimentPredictionInstance", - }, + manifest={"TextSentimentPredictionInstance",}, ) diff --git a/google/cloud/aiplatform/v1beta1/schema/predict/instance_v1beta1/types/video_action_recognition.py b/google/cloud/aiplatform/v1beta1/schema/predict/instance_v1beta1/types/video_action_recognition.py index 0e6d5afd6e..89be6318f8 100644 --- a/google/cloud/aiplatform/v1beta1/schema/predict/instance_v1beta1/types/video_action_recognition.py +++ b/google/cloud/aiplatform/v1beta1/schema/predict/instance_v1beta1/types/video_action_recognition.py @@ -20,9 +20,7 @@ __protobuf__ = proto.module( package="google.cloud.aiplatform.v1beta1.schema.predict.instance", - manifest={ - "VideoActionRecognitionPredictionInstance", - }, + manifest={"VideoActionRecognitionPredictionInstance",}, ) diff --git a/google/cloud/aiplatform/v1beta1/schema/predict/instance_v1beta1/types/video_classification.py b/google/cloud/aiplatform/v1beta1/schema/predict/instance_v1beta1/types/video_classification.py index 32c0dff2f7..41ab3bc217 100644 --- a/google/cloud/aiplatform/v1beta1/schema/predict/instance_v1beta1/types/video_classification.py +++ b/google/cloud/aiplatform/v1beta1/schema/predict/instance_v1beta1/types/video_classification.py @@ -20,9 +20,7 @@ __protobuf__ = proto.module( 
package="google.cloud.aiplatform.v1beta1.schema.predict.instance", - manifest={ - "VideoClassificationPredictionInstance", - }, + manifest={"VideoClassificationPredictionInstance",}, ) diff --git a/google/cloud/aiplatform/v1beta1/schema/predict/instance_v1beta1/types/video_object_tracking.py b/google/cloud/aiplatform/v1beta1/schema/predict/instance_v1beta1/types/video_object_tracking.py index 4c6d0714bb..3729c14816 100644 --- a/google/cloud/aiplatform/v1beta1/schema/predict/instance_v1beta1/types/video_object_tracking.py +++ b/google/cloud/aiplatform/v1beta1/schema/predict/instance_v1beta1/types/video_object_tracking.py @@ -20,9 +20,7 @@ __protobuf__ = proto.module( package="google.cloud.aiplatform.v1beta1.schema.predict.instance", - manifest={ - "VideoObjectTrackingPredictionInstance", - }, + manifest={"VideoObjectTrackingPredictionInstance",}, ) diff --git a/google/cloud/aiplatform/v1beta1/schema/predict/params_v1beta1/types/image_classification.py b/google/cloud/aiplatform/v1beta1/schema/predict/params_v1beta1/types/image_classification.py index b8deb2a0c6..681a8c3d87 100644 --- a/google/cloud/aiplatform/v1beta1/schema/predict/params_v1beta1/types/image_classification.py +++ b/google/cloud/aiplatform/v1beta1/schema/predict/params_v1beta1/types/image_classification.py @@ -20,9 +20,7 @@ __protobuf__ = proto.module( package="google.cloud.aiplatform.v1beta1.schema.predict.params", - manifest={ - "ImageClassificationPredictionParams", - }, + manifest={"ImageClassificationPredictionParams",}, ) diff --git a/google/cloud/aiplatform/v1beta1/schema/predict/params_v1beta1/types/image_object_detection.py b/google/cloud/aiplatform/v1beta1/schema/predict/params_v1beta1/types/image_object_detection.py index 13bf3059b9..146dd324b7 100644 --- a/google/cloud/aiplatform/v1beta1/schema/predict/params_v1beta1/types/image_object_detection.py +++ b/google/cloud/aiplatform/v1beta1/schema/predict/params_v1beta1/types/image_object_detection.py @@ -20,9 +20,7 @@ __protobuf__ = proto.module( package="google.cloud.aiplatform.v1beta1.schema.predict.params", - manifest={ - "ImageObjectDetectionPredictionParams", - }, + manifest={"ImageObjectDetectionPredictionParams",}, ) diff --git a/google/cloud/aiplatform/v1beta1/schema/predict/params_v1beta1/types/image_segmentation.py b/google/cloud/aiplatform/v1beta1/schema/predict/params_v1beta1/types/image_segmentation.py index 3e24237e86..aa11739a61 100644 --- a/google/cloud/aiplatform/v1beta1/schema/predict/params_v1beta1/types/image_segmentation.py +++ b/google/cloud/aiplatform/v1beta1/schema/predict/params_v1beta1/types/image_segmentation.py @@ -20,9 +20,7 @@ __protobuf__ = proto.module( package="google.cloud.aiplatform.v1beta1.schema.predict.params", - manifest={ - "ImageSegmentationPredictionParams", - }, + manifest={"ImageSegmentationPredictionParams",}, ) diff --git a/google/cloud/aiplatform/v1beta1/schema/predict/params_v1beta1/types/video_action_recognition.py b/google/cloud/aiplatform/v1beta1/schema/predict/params_v1beta1/types/video_action_recognition.py index 7d8d6e1a82..c1f8f9f3bc 100644 --- a/google/cloud/aiplatform/v1beta1/schema/predict/params_v1beta1/types/video_action_recognition.py +++ b/google/cloud/aiplatform/v1beta1/schema/predict/params_v1beta1/types/video_action_recognition.py @@ -20,9 +20,7 @@ __protobuf__ = proto.module( package="google.cloud.aiplatform.v1beta1.schema.predict.params", - manifest={ - "VideoActionRecognitionPredictionParams", - }, + manifest={"VideoActionRecognitionPredictionParams",}, ) diff --git 
a/google/cloud/aiplatform/v1beta1/schema/predict/params_v1beta1/types/video_classification.py b/google/cloud/aiplatform/v1beta1/schema/predict/params_v1beta1/types/video_classification.py index 80149d426b..1b8d84a7d1 100644 --- a/google/cloud/aiplatform/v1beta1/schema/predict/params_v1beta1/types/video_classification.py +++ b/google/cloud/aiplatform/v1beta1/schema/predict/params_v1beta1/types/video_classification.py @@ -20,9 +20,7 @@ __protobuf__ = proto.module( package="google.cloud.aiplatform.v1beta1.schema.predict.params", - manifest={ - "VideoClassificationPredictionParams", - }, + manifest={"VideoClassificationPredictionParams",}, ) diff --git a/google/cloud/aiplatform/v1beta1/schema/predict/params_v1beta1/types/video_object_tracking.py b/google/cloud/aiplatform/v1beta1/schema/predict/params_v1beta1/types/video_object_tracking.py index 8aa3ff8384..4c0b6846bc 100644 --- a/google/cloud/aiplatform/v1beta1/schema/predict/params_v1beta1/types/video_object_tracking.py +++ b/google/cloud/aiplatform/v1beta1/schema/predict/params_v1beta1/types/video_object_tracking.py @@ -20,9 +20,7 @@ __protobuf__ = proto.module( package="google.cloud.aiplatform.v1beta1.schema.predict.params", - manifest={ - "VideoObjectTrackingPredictionParams", - }, + manifest={"VideoObjectTrackingPredictionParams",}, ) diff --git a/google/cloud/aiplatform/v1beta1/schema/predict/prediction_v1beta1/types/classification.py b/google/cloud/aiplatform/v1beta1/schema/predict/prediction_v1beta1/types/classification.py index 850779b6b7..3bfe82f64e 100644 --- a/google/cloud/aiplatform/v1beta1/schema/predict/prediction_v1beta1/types/classification.py +++ b/google/cloud/aiplatform/v1beta1/schema/predict/prediction_v1beta1/types/classification.py @@ -20,9 +20,7 @@ __protobuf__ = proto.module( package="google.cloud.aiplatform.v1beta1.schema.predict.prediction", - manifest={ - "ClassificationPredictionResult", - }, + manifest={"ClassificationPredictionResult",}, ) diff --git a/google/cloud/aiplatform/v1beta1/schema/predict/prediction_v1beta1/types/image_object_detection.py b/google/cloud/aiplatform/v1beta1/schema/predict/prediction_v1beta1/types/image_object_detection.py index 08cd977503..1bf5002c2a 100644 --- a/google/cloud/aiplatform/v1beta1/schema/predict/prediction_v1beta1/types/image_object_detection.py +++ b/google/cloud/aiplatform/v1beta1/schema/predict/prediction_v1beta1/types/image_object_detection.py @@ -23,9 +23,7 @@ __protobuf__ = proto.module( package="google.cloud.aiplatform.v1beta1.schema.predict.prediction", - manifest={ - "ImageObjectDetectionPredictionResult", - }, + manifest={"ImageObjectDetectionPredictionResult",}, ) @@ -60,11 +58,7 @@ class ImageObjectDetectionPredictionResult(proto.Message): confidences = proto.RepeatedField(proto.FLOAT, number=3) - bboxes = proto.RepeatedField( - proto.MESSAGE, - number=4, - message=struct.ListValue, - ) + bboxes = proto.RepeatedField(proto.MESSAGE, number=4, message=struct.ListValue,) __all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/google/cloud/aiplatform/v1beta1/schema/predict/prediction_v1beta1/types/image_segmentation.py b/google/cloud/aiplatform/v1beta1/schema/predict/prediction_v1beta1/types/image_segmentation.py index a92a3805a3..195dea6f79 100644 --- a/google/cloud/aiplatform/v1beta1/schema/predict/prediction_v1beta1/types/image_segmentation.py +++ b/google/cloud/aiplatform/v1beta1/schema/predict/prediction_v1beta1/types/image_segmentation.py @@ -20,9 +20,7 @@ __protobuf__ = proto.module( package="google.cloud.aiplatform.v1beta1.schema.predict.prediction", - 
manifest={ - "ImageSegmentationPredictionResult", - }, + manifest={"ImageSegmentationPredictionResult",}, ) diff --git a/google/cloud/aiplatform/v1beta1/schema/predict/prediction_v1beta1/types/tabular_classification.py b/google/cloud/aiplatform/v1beta1/schema/predict/prediction_v1beta1/types/tabular_classification.py index 759329db4b..4906ad59a5 100644 --- a/google/cloud/aiplatform/v1beta1/schema/predict/prediction_v1beta1/types/tabular_classification.py +++ b/google/cloud/aiplatform/v1beta1/schema/predict/prediction_v1beta1/types/tabular_classification.py @@ -20,9 +20,7 @@ __protobuf__ = proto.module( package="google.cloud.aiplatform.v1beta1.schema.predict.prediction", - manifest={ - "TabularClassificationPredictionResult", - }, + manifest={"TabularClassificationPredictionResult",}, ) diff --git a/google/cloud/aiplatform/v1beta1/schema/predict/prediction_v1beta1/types/tabular_regression.py b/google/cloud/aiplatform/v1beta1/schema/predict/prediction_v1beta1/types/tabular_regression.py index ed7851e3bd..71d535c1f0 100644 --- a/google/cloud/aiplatform/v1beta1/schema/predict/prediction_v1beta1/types/tabular_regression.py +++ b/google/cloud/aiplatform/v1beta1/schema/predict/prediction_v1beta1/types/tabular_regression.py @@ -20,9 +20,7 @@ __protobuf__ = proto.module( package="google.cloud.aiplatform.v1beta1.schema.predict.prediction", - manifest={ - "TabularRegressionPredictionResult", - }, + manifest={"TabularRegressionPredictionResult",}, ) diff --git a/google/cloud/aiplatform/v1beta1/schema/predict/prediction_v1beta1/types/text_extraction.py b/google/cloud/aiplatform/v1beta1/schema/predict/prediction_v1beta1/types/text_extraction.py index 5450db2ffb..e3c10b5d75 100644 --- a/google/cloud/aiplatform/v1beta1/schema/predict/prediction_v1beta1/types/text_extraction.py +++ b/google/cloud/aiplatform/v1beta1/schema/predict/prediction_v1beta1/types/text_extraction.py @@ -20,9 +20,7 @@ __protobuf__ = proto.module( package="google.cloud.aiplatform.v1beta1.schema.predict.prediction", - manifest={ - "TextExtractionPredictionResult", - }, + manifest={"TextExtractionPredictionResult",}, ) diff --git a/google/cloud/aiplatform/v1beta1/schema/predict/prediction_v1beta1/types/text_sentiment.py b/google/cloud/aiplatform/v1beta1/schema/predict/prediction_v1beta1/types/text_sentiment.py index fcd296366f..192e50419d 100644 --- a/google/cloud/aiplatform/v1beta1/schema/predict/prediction_v1beta1/types/text_sentiment.py +++ b/google/cloud/aiplatform/v1beta1/schema/predict/prediction_v1beta1/types/text_sentiment.py @@ -23,9 +23,7 @@ __protobuf__ = proto.module( package="google.cloud.aiplatform.v1beta1.schema.predict.prediction", - manifest={ - "TextSentimentPredictionResult", - }, + manifest={"TextSentimentPredictionResult",}, ) @@ -64,11 +62,7 @@ class Prediction(proto.Message): message=gcaspi_text_sentiment.TextSentimentPredictionInstance, ) - prediction = proto.Field( - proto.MESSAGE, - number=2, - message=Prediction, - ) + prediction = proto.Field(proto.MESSAGE, number=2, message=Prediction,) __all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/google/cloud/aiplatform/v1beta1/schema/predict/prediction_v1beta1/types/time_series_forecasting.py b/google/cloud/aiplatform/v1beta1/schema/predict/prediction_v1beta1/types/time_series_forecasting.py index eb30436beb..38bd8e3c85 100644 --- a/google/cloud/aiplatform/v1beta1/schema/predict/prediction_v1beta1/types/time_series_forecasting.py +++ b/google/cloud/aiplatform/v1beta1/schema/predict/prediction_v1beta1/types/time_series_forecasting.py @@ -20,9 +20,7 @@ 
__protobuf__ = proto.module( package="google.cloud.aiplatform.v1beta1.schema.predict.prediction", - manifest={ - "TimeSeriesForecastingPredictionResult", - }, + manifest={"TimeSeriesForecastingPredictionResult",}, ) diff --git a/google/cloud/aiplatform/v1beta1/schema/predict/prediction_v1beta1/types/video_action_recognition.py b/google/cloud/aiplatform/v1beta1/schema/predict/prediction_v1beta1/types/video_action_recognition.py index 8105e21a87..f76b51899b 100644 --- a/google/cloud/aiplatform/v1beta1/schema/predict/prediction_v1beta1/types/video_action_recognition.py +++ b/google/cloud/aiplatform/v1beta1/schema/predict/prediction_v1beta1/types/video_action_recognition.py @@ -24,9 +24,7 @@ __protobuf__ = proto.module( package="google.cloud.aiplatform.v1beta1.schema.predict.prediction", - manifest={ - "VideoActionRecognitionPredictionResult", - }, + manifest={"VideoActionRecognitionPredictionResult",}, ) @@ -65,22 +63,12 @@ class VideoActionRecognitionPredictionResult(proto.Message): display_name = proto.Field(proto.STRING, number=2) time_segment_start = proto.Field( - proto.MESSAGE, - number=4, - message=duration.Duration, + proto.MESSAGE, number=4, message=duration.Duration, ) - time_segment_end = proto.Field( - proto.MESSAGE, - number=5, - message=duration.Duration, - ) + time_segment_end = proto.Field(proto.MESSAGE, number=5, message=duration.Duration,) - confidence = proto.Field( - proto.MESSAGE, - number=6, - message=wrappers.FloatValue, - ) + confidence = proto.Field(proto.MESSAGE, number=6, message=wrappers.FloatValue,) __all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/google/cloud/aiplatform/v1beta1/schema/predict/prediction_v1beta1/types/video_classification.py b/google/cloud/aiplatform/v1beta1/schema/predict/prediction_v1beta1/types/video_classification.py index dbee575ef5..469023b122 100644 --- a/google/cloud/aiplatform/v1beta1/schema/predict/prediction_v1beta1/types/video_classification.py +++ b/google/cloud/aiplatform/v1beta1/schema/predict/prediction_v1beta1/types/video_classification.py @@ -24,9 +24,7 @@ __protobuf__ = proto.module( package="google.cloud.aiplatform.v1beta1.schema.predict.prediction", - manifest={ - "VideoClassificationPredictionResult", - }, + manifest={"VideoClassificationPredictionResult",}, ) @@ -81,22 +79,12 @@ class VideoClassificationPredictionResult(proto.Message): type_ = proto.Field(proto.STRING, number=3) time_segment_start = proto.Field( - proto.MESSAGE, - number=4, - message=duration.Duration, + proto.MESSAGE, number=4, message=duration.Duration, ) - time_segment_end = proto.Field( - proto.MESSAGE, - number=5, - message=duration.Duration, - ) + time_segment_end = proto.Field(proto.MESSAGE, number=5, message=duration.Duration,) - confidence = proto.Field( - proto.MESSAGE, - number=6, - message=wrappers.FloatValue, - ) + confidence = proto.Field(proto.MESSAGE, number=6, message=wrappers.FloatValue,) __all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/google/cloud/aiplatform/v1beta1/schema/predict/prediction_v1beta1/types/video_object_tracking.py b/google/cloud/aiplatform/v1beta1/schema/predict/prediction_v1beta1/types/video_object_tracking.py index 2a05724028..026f80a325 100644 --- a/google/cloud/aiplatform/v1beta1/schema/predict/prediction_v1beta1/types/video_object_tracking.py +++ b/google/cloud/aiplatform/v1beta1/schema/predict/prediction_v1beta1/types/video_object_tracking.py @@ -24,9 +24,7 @@ __protobuf__ = proto.module( package="google.cloud.aiplatform.v1beta1.schema.predict.prediction", - manifest={ - 
"VideoObjectTrackingPredictionResult", - }, + manifest={"VideoObjectTrackingPredictionResult",}, ) @@ -89,63 +87,29 @@ class Frame(proto.Message): box. """ - time_offset = proto.Field( - proto.MESSAGE, - number=1, - message=duration.Duration, - ) - - x_min = proto.Field( - proto.MESSAGE, - number=2, - message=wrappers.FloatValue, - ) - - x_max = proto.Field( - proto.MESSAGE, - number=3, - message=wrappers.FloatValue, - ) - - y_min = proto.Field( - proto.MESSAGE, - number=4, - message=wrappers.FloatValue, - ) - - y_max = proto.Field( - proto.MESSAGE, - number=5, - message=wrappers.FloatValue, - ) + time_offset = proto.Field(proto.MESSAGE, number=1, message=duration.Duration,) + + x_min = proto.Field(proto.MESSAGE, number=2, message=wrappers.FloatValue,) + + x_max = proto.Field(proto.MESSAGE, number=3, message=wrappers.FloatValue,) + + y_min = proto.Field(proto.MESSAGE, number=4, message=wrappers.FloatValue,) + + y_max = proto.Field(proto.MESSAGE, number=5, message=wrappers.FloatValue,) id = proto.Field(proto.STRING, number=1) display_name = proto.Field(proto.STRING, number=2) time_segment_start = proto.Field( - proto.MESSAGE, - number=3, - message=duration.Duration, + proto.MESSAGE, number=3, message=duration.Duration, ) - time_segment_end = proto.Field( - proto.MESSAGE, - number=4, - message=duration.Duration, - ) + time_segment_end = proto.Field(proto.MESSAGE, number=4, message=duration.Duration,) - confidence = proto.Field( - proto.MESSAGE, - number=5, - message=wrappers.FloatValue, - ) + confidence = proto.Field(proto.MESSAGE, number=5, message=wrappers.FloatValue,) - frames = proto.RepeatedField( - proto.MESSAGE, - number=6, - message=Frame, - ) + frames = proto.RepeatedField(proto.MESSAGE, number=6, message=Frame,) __all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/google/cloud/aiplatform/v1beta1/schema/trainingjob/definition_v1beta1/types/automl_forecasting.py b/google/cloud/aiplatform/v1beta1/schema/trainingjob/definition_v1beta1/types/automl_forecasting.py index 337138d774..40c549dc5f 100644 --- a/google/cloud/aiplatform/v1beta1/schema/trainingjob/definition_v1beta1/types/automl_forecasting.py +++ b/google/cloud/aiplatform/v1beta1/schema/trainingjob/definition_v1beta1/types/automl_forecasting.py @@ -44,16 +44,10 @@ class AutoMlForecasting(proto.Message): The metadata information. 
""" - inputs = proto.Field( - proto.MESSAGE, - number=1, - message="AutoMlForecastingInputs", - ) + inputs = proto.Field(proto.MESSAGE, number=1, message="AutoMlForecastingInputs",) metadata = proto.Field( - proto.MESSAGE, - number=2, - message="AutoMlForecastingMetadata", + proto.MESSAGE, number=2, message="AutoMlForecastingMetadata", ) @@ -445,9 +439,7 @@ class Period(proto.Message): time_column = proto.Field(proto.STRING, number=3) transformations = proto.RepeatedField( - proto.MESSAGE, - number=4, - message=Transformation, + proto.MESSAGE, number=4, message=Transformation, ) optimization_objective = proto.Field(proto.STRING, number=5) @@ -462,11 +454,7 @@ class Period(proto.Message): time_variant_past_and_future_columns = proto.RepeatedField(proto.STRING, number=10) - period = proto.Field( - proto.MESSAGE, - number=11, - message=Period, - ) + period = proto.Field(proto.MESSAGE, number=11, message=Period,) forecast_window_start = proto.Field(proto.INT64, number=12) diff --git a/google/cloud/aiplatform/v1beta1/schema/trainingjob/definition_v1beta1/types/automl_image_classification.py b/google/cloud/aiplatform/v1beta1/schema/trainingjob/definition_v1beta1/types/automl_image_classification.py index 57fb8fd17c..0ee0394192 100644 --- a/google/cloud/aiplatform/v1beta1/schema/trainingjob/definition_v1beta1/types/automl_image_classification.py +++ b/google/cloud/aiplatform/v1beta1/schema/trainingjob/definition_v1beta1/types/automl_image_classification.py @@ -40,15 +40,11 @@ class AutoMlImageClassification(proto.Message): """ inputs = proto.Field( - proto.MESSAGE, - number=1, - message="AutoMlImageClassificationInputs", + proto.MESSAGE, number=1, message="AutoMlImageClassificationInputs", ) metadata = proto.Field( - proto.MESSAGE, - number=2, - message="AutoMlImageClassificationMetadata", + proto.MESSAGE, number=2, message="AutoMlImageClassificationMetadata", ) @@ -105,11 +101,7 @@ class ModelType(proto.Enum): MOBILE_TF_VERSATILE_1 = 3 MOBILE_TF_HIGH_ACCURACY_1 = 4 - model_type = proto.Field( - proto.ENUM, - number=1, - enum=ModelType, - ) + model_type = proto.Field(proto.ENUM, number=1, enum=ModelType,) base_model_id = proto.Field(proto.STRING, number=2) @@ -144,9 +136,7 @@ class SuccessfulStopReason(proto.Enum): cost_milli_node_hours = proto.Field(proto.INT64, number=1) successful_stop_reason = proto.Field( - proto.ENUM, - number=2, - enum=SuccessfulStopReason, + proto.ENUM, number=2, enum=SuccessfulStopReason, ) diff --git a/google/cloud/aiplatform/v1beta1/schema/trainingjob/definition_v1beta1/types/automl_image_object_detection.py b/google/cloud/aiplatform/v1beta1/schema/trainingjob/definition_v1beta1/types/automl_image_object_detection.py index 420e4a4a31..3fb9d3ae1d 100644 --- a/google/cloud/aiplatform/v1beta1/schema/trainingjob/definition_v1beta1/types/automl_image_object_detection.py +++ b/google/cloud/aiplatform/v1beta1/schema/trainingjob/definition_v1beta1/types/automl_image_object_detection.py @@ -40,15 +40,11 @@ class AutoMlImageObjectDetection(proto.Message): """ inputs = proto.Field( - proto.MESSAGE, - number=1, - message="AutoMlImageObjectDetectionInputs", + proto.MESSAGE, number=1, message="AutoMlImageObjectDetectionInputs", ) metadata = proto.Field( - proto.MESSAGE, - number=2, - message="AutoMlImageObjectDetectionMetadata", + proto.MESSAGE, number=2, message="AutoMlImageObjectDetectionMetadata", ) @@ -94,11 +90,7 @@ class ModelType(proto.Enum): MOBILE_TF_VERSATILE_1 = 4 MOBILE_TF_HIGH_ACCURACY_1 = 5 - model_type = proto.Field( - proto.ENUM, - number=1, - enum=ModelType, - ) + 
model_type = proto.Field(proto.ENUM, number=1, enum=ModelType,) budget_milli_node_hours = proto.Field(proto.INT64, number=2) @@ -129,9 +121,7 @@ class SuccessfulStopReason(proto.Enum): cost_milli_node_hours = proto.Field(proto.INT64, number=1) successful_stop_reason = proto.Field( - proto.ENUM, - number=2, - enum=SuccessfulStopReason, + proto.ENUM, number=2, enum=SuccessfulStopReason, ) diff --git a/google/cloud/aiplatform/v1beta1/schema/trainingjob/definition_v1beta1/types/automl_image_segmentation.py b/google/cloud/aiplatform/v1beta1/schema/trainingjob/definition_v1beta1/types/automl_image_segmentation.py index c767f4272b..0fa3788b11 100644 --- a/google/cloud/aiplatform/v1beta1/schema/trainingjob/definition_v1beta1/types/automl_image_segmentation.py +++ b/google/cloud/aiplatform/v1beta1/schema/trainingjob/definition_v1beta1/types/automl_image_segmentation.py @@ -40,15 +40,11 @@ class AutoMlImageSegmentation(proto.Message): """ inputs = proto.Field( - proto.MESSAGE, - number=1, - message="AutoMlImageSegmentationInputs", + proto.MESSAGE, number=1, message="AutoMlImageSegmentationInputs", ) metadata = proto.Field( - proto.MESSAGE, - number=2, - message="AutoMlImageSegmentationMetadata", + proto.MESSAGE, number=2, message="AutoMlImageSegmentationMetadata", ) @@ -87,11 +83,7 @@ class ModelType(proto.Enum): CLOUD_HIGH_ACCURACY_1 = 1 CLOUD_LOW_ACCURACY_1 = 2 - model_type = proto.Field( - proto.ENUM, - number=1, - enum=ModelType, - ) + model_type = proto.Field(proto.ENUM, number=1, enum=ModelType,) budget_milli_node_hours = proto.Field(proto.INT64, number=2) @@ -122,9 +114,7 @@ class SuccessfulStopReason(proto.Enum): cost_milli_node_hours = proto.Field(proto.INT64, number=1) successful_stop_reason = proto.Field( - proto.ENUM, - number=2, - enum=SuccessfulStopReason, + proto.ENUM, number=2, enum=SuccessfulStopReason, ) diff --git a/google/cloud/aiplatform/v1beta1/schema/trainingjob/definition_v1beta1/types/automl_tables.py b/google/cloud/aiplatform/v1beta1/schema/trainingjob/definition_v1beta1/types/automl_tables.py index 362b3613fd..55d620b32e 100644 --- a/google/cloud/aiplatform/v1beta1/schema/trainingjob/definition_v1beta1/types/automl_tables.py +++ b/google/cloud/aiplatform/v1beta1/schema/trainingjob/definition_v1beta1/types/automl_tables.py @@ -25,11 +25,7 @@ __protobuf__ = proto.module( package="google.cloud.aiplatform.v1beta1.schema.trainingjob.definition", - manifest={ - "AutoMlTables", - "AutoMlTablesInputs", - "AutoMlTablesMetadata", - }, + manifest={"AutoMlTables", "AutoMlTablesInputs", "AutoMlTablesMetadata",}, ) @@ -43,17 +39,9 @@ class AutoMlTables(proto.Message): The metadata information. 
""" - inputs = proto.Field( - proto.MESSAGE, - number=1, - message="AutoMlTablesInputs", - ) + inputs = proto.Field(proto.MESSAGE, number=1, message="AutoMlTablesInputs",) - metadata = proto.Field( - proto.MESSAGE, - number=2, - message="AutoMlTablesMetadata", - ) + metadata = proto.Field(proto.MESSAGE, number=2, message="AutoMlTablesMetadata",) class AutoMlTablesInputs(proto.Message): @@ -424,9 +412,7 @@ class TextArrayTransformation(proto.Message): target_column = proto.Field(proto.STRING, number=2) transformations = proto.RepeatedField( - proto.MESSAGE, - number=3, - message=Transformation, + proto.MESSAGE, number=3, message=Transformation, ) optimization_objective = proto.Field(proto.STRING, number=4) diff --git a/google/cloud/aiplatform/v1beta1/schema/trainingjob/definition_v1beta1/types/automl_text_classification.py b/google/cloud/aiplatform/v1beta1/schema/trainingjob/definition_v1beta1/types/automl_text_classification.py index 8b7c29d198..ca75734600 100644 --- a/google/cloud/aiplatform/v1beta1/schema/trainingjob/definition_v1beta1/types/automl_text_classification.py +++ b/google/cloud/aiplatform/v1beta1/schema/trainingjob/definition_v1beta1/types/automl_text_classification.py @@ -20,10 +20,7 @@ __protobuf__ = proto.module( package="google.cloud.aiplatform.v1beta1.schema.trainingjob.definition", - manifest={ - "AutoMlTextClassification", - "AutoMlTextClassificationInputs", - }, + manifest={"AutoMlTextClassification", "AutoMlTextClassificationInputs",}, ) @@ -37,9 +34,7 @@ class AutoMlTextClassification(proto.Message): """ inputs = proto.Field( - proto.MESSAGE, - number=1, - message="AutoMlTextClassificationInputs", + proto.MESSAGE, number=1, message="AutoMlTextClassificationInputs", ) diff --git a/google/cloud/aiplatform/v1beta1/schema/trainingjob/definition_v1beta1/types/automl_text_extraction.py b/google/cloud/aiplatform/v1beta1/schema/trainingjob/definition_v1beta1/types/automl_text_extraction.py index c1e44e4630..336509af22 100644 --- a/google/cloud/aiplatform/v1beta1/schema/trainingjob/definition_v1beta1/types/automl_text_extraction.py +++ b/google/cloud/aiplatform/v1beta1/schema/trainingjob/definition_v1beta1/types/automl_text_extraction.py @@ -20,10 +20,7 @@ __protobuf__ = proto.module( package="google.cloud.aiplatform.v1beta1.schema.trainingjob.definition", - manifest={ - "AutoMlTextExtraction", - "AutoMlTextExtractionInputs", - }, + manifest={"AutoMlTextExtraction", "AutoMlTextExtractionInputs",}, ) @@ -36,11 +33,7 @@ class AutoMlTextExtraction(proto.Message): The input parameters of this TrainingJob. 
""" - inputs = proto.Field( - proto.MESSAGE, - number=1, - message="AutoMlTextExtractionInputs", - ) + inputs = proto.Field(proto.MESSAGE, number=1, message="AutoMlTextExtractionInputs",) class AutoMlTextExtractionInputs(proto.Message): diff --git a/google/cloud/aiplatform/v1beta1/schema/trainingjob/definition_v1beta1/types/automl_text_sentiment.py b/google/cloud/aiplatform/v1beta1/schema/trainingjob/definition_v1beta1/types/automl_text_sentiment.py index d1b936a361..d5de97e2b2 100644 --- a/google/cloud/aiplatform/v1beta1/schema/trainingjob/definition_v1beta1/types/automl_text_sentiment.py +++ b/google/cloud/aiplatform/v1beta1/schema/trainingjob/definition_v1beta1/types/automl_text_sentiment.py @@ -20,10 +20,7 @@ __protobuf__ = proto.module( package="google.cloud.aiplatform.v1beta1.schema.trainingjob.definition", - manifest={ - "AutoMlTextSentiment", - "AutoMlTextSentimentInputs", - }, + manifest={"AutoMlTextSentiment", "AutoMlTextSentimentInputs",}, ) @@ -36,11 +33,7 @@ class AutoMlTextSentiment(proto.Message): The input parameters of this TrainingJob. """ - inputs = proto.Field( - proto.MESSAGE, - number=1, - message="AutoMlTextSentimentInputs", - ) + inputs = proto.Field(proto.MESSAGE, number=1, message="AutoMlTextSentimentInputs",) class AutoMlTextSentimentInputs(proto.Message): diff --git a/google/cloud/aiplatform/v1beta1/schema/trainingjob/definition_v1beta1/types/automl_video_action_recognition.py b/google/cloud/aiplatform/v1beta1/schema/trainingjob/definition_v1beta1/types/automl_video_action_recognition.py index 0c5ae5f629..d6969d93c6 100644 --- a/google/cloud/aiplatform/v1beta1/schema/trainingjob/definition_v1beta1/types/automl_video_action_recognition.py +++ b/google/cloud/aiplatform/v1beta1/schema/trainingjob/definition_v1beta1/types/automl_video_action_recognition.py @@ -20,10 +20,7 @@ __protobuf__ = proto.module( package="google.cloud.aiplatform.v1beta1.schema.trainingjob.definition", - manifest={ - "AutoMlVideoActionRecognition", - "AutoMlVideoActionRecognitionInputs", - }, + manifest={"AutoMlVideoActionRecognition", "AutoMlVideoActionRecognitionInputs",}, ) @@ -37,9 +34,7 @@ class AutoMlVideoActionRecognition(proto.Message): """ inputs = proto.Field( - proto.MESSAGE, - number=1, - message="AutoMlVideoActionRecognitionInputs", + proto.MESSAGE, number=1, message="AutoMlVideoActionRecognitionInputs", ) @@ -57,11 +52,7 @@ class ModelType(proto.Enum): CLOUD = 1 MOBILE_VERSATILE_1 = 2 - model_type = proto.Field( - proto.ENUM, - number=1, - enum=ModelType, - ) + model_type = proto.Field(proto.ENUM, number=1, enum=ModelType,) __all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/google/cloud/aiplatform/v1beta1/schema/trainingjob/definition_v1beta1/types/automl_video_classification.py b/google/cloud/aiplatform/v1beta1/schema/trainingjob/definition_v1beta1/types/automl_video_classification.py index 4e06caf015..3164544d47 100644 --- a/google/cloud/aiplatform/v1beta1/schema/trainingjob/definition_v1beta1/types/automl_video_classification.py +++ b/google/cloud/aiplatform/v1beta1/schema/trainingjob/definition_v1beta1/types/automl_video_classification.py @@ -20,10 +20,7 @@ __protobuf__ = proto.module( package="google.cloud.aiplatform.v1beta1.schema.trainingjob.definition", - manifest={ - "AutoMlVideoClassification", - "AutoMlVideoClassificationInputs", - }, + manifest={"AutoMlVideoClassification", "AutoMlVideoClassificationInputs",}, ) @@ -37,9 +34,7 @@ class AutoMlVideoClassification(proto.Message): """ inputs = proto.Field( - proto.MESSAGE, - number=1, - 
message="AutoMlVideoClassificationInputs", + proto.MESSAGE, number=1, message="AutoMlVideoClassificationInputs", ) @@ -57,11 +52,7 @@ class ModelType(proto.Enum): CLOUD = 1 MOBILE_VERSATILE_1 = 2 - model_type = proto.Field( - proto.ENUM, - number=1, - enum=ModelType, - ) + model_type = proto.Field(proto.ENUM, number=1, enum=ModelType,) __all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/google/cloud/aiplatform/v1beta1/schema/trainingjob/definition_v1beta1/types/automl_video_object_tracking.py b/google/cloud/aiplatform/v1beta1/schema/trainingjob/definition_v1beta1/types/automl_video_object_tracking.py index e351db59d3..0fd8c7ec7a 100644 --- a/google/cloud/aiplatform/v1beta1/schema/trainingjob/definition_v1beta1/types/automl_video_object_tracking.py +++ b/google/cloud/aiplatform/v1beta1/schema/trainingjob/definition_v1beta1/types/automl_video_object_tracking.py @@ -20,10 +20,7 @@ __protobuf__ = proto.module( package="google.cloud.aiplatform.v1beta1.schema.trainingjob.definition", - manifest={ - "AutoMlVideoObjectTracking", - "AutoMlVideoObjectTrackingInputs", - }, + manifest={"AutoMlVideoObjectTracking", "AutoMlVideoObjectTrackingInputs",}, ) @@ -37,9 +34,7 @@ class AutoMlVideoObjectTracking(proto.Message): """ inputs = proto.Field( - proto.MESSAGE, - number=1, - message="AutoMlVideoObjectTrackingInputs", + proto.MESSAGE, number=1, message="AutoMlVideoObjectTrackingInputs", ) @@ -61,11 +56,7 @@ class ModelType(proto.Enum): MOBILE_JETSON_VERSATILE_1 = 5 MOBILE_JETSON_LOW_LATENCY_1 = 6 - model_type = proto.Field( - proto.ENUM, - number=1, - enum=ModelType, - ) + model_type = proto.Field(proto.ENUM, number=1, enum=ModelType,) __all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/google/cloud/aiplatform/v1beta1/schema/trainingjob/definition_v1beta1/types/export_evaluated_data_items_config.py b/google/cloud/aiplatform/v1beta1/schema/trainingjob/definition_v1beta1/types/export_evaluated_data_items_config.py index 4d8070c737..29bc547adf 100644 --- a/google/cloud/aiplatform/v1beta1/schema/trainingjob/definition_v1beta1/types/export_evaluated_data_items_config.py +++ b/google/cloud/aiplatform/v1beta1/schema/trainingjob/definition_v1beta1/types/export_evaluated_data_items_config.py @@ -20,9 +20,7 @@ __protobuf__ = proto.module( package="google.cloud.aiplatform.v1beta1.schema.trainingjob.definition", - manifest={ - "ExportEvaluatedDataItemsConfig", - }, + manifest={"ExportEvaluatedDataItemsConfig",}, ) diff --git a/google/cloud/aiplatform_v1beta1/services/dataset_service/async_client.py b/google/cloud/aiplatform_v1beta1/services/dataset_service/async_client.py index d0139b3003..1927709f30 100644 --- a/google/cloud/aiplatform_v1beta1/services/dataset_service/async_client.py +++ b/google/cloud/aiplatform_v1beta1/services/dataset_service/async_client.py @@ -239,12 +239,7 @@ async def create_dataset( ) # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) # Wrap the response in an operation future. response = operation_async.from_gapic( @@ -324,12 +319,7 @@ async def get_dataset( ) # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) # Done; return the response. return response @@ -419,12 +409,7 @@ async def update_dataset( ) # Send the request. 
- response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) # Done; return the response. return response @@ -499,20 +484,12 @@ async def list_datasets( ) # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) # This method is paged; wrap the response in a pager, which provides # an `__aiter__` convenience method. response = pagers.ListDatasetsAsyncPager( - method=rpc, - request=request, - response=response, - metadata=metadata, + method=rpc, request=request, response=response, metadata=metadata, ) # Done; return the response. @@ -601,12 +578,7 @@ async def delete_dataset( ) # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) # Wrap the response in an operation future. response = operation_async.from_gapic( @@ -701,12 +673,7 @@ async def import_data( ) # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) # Wrap the response in an operation future. response = operation_async.from_gapic( @@ -799,12 +766,7 @@ async def export_data( ) # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) # Wrap the response in an operation future. response = operation_async.from_gapic( @@ -888,20 +850,12 @@ async def list_data_items( ) # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) # This method is paged; wrap the response in a pager, which provides # an `__aiter__` convenience method. response = pagers.ListDataItemsAsyncPager( - method=rpc, - request=request, - response=response, - metadata=metadata, + method=rpc, request=request, response=response, metadata=metadata, ) # Done; return the response. @@ -976,12 +930,7 @@ async def get_annotation_spec( ) # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) # Done; return the response. return response @@ -1058,20 +1007,12 @@ async def list_annotations( ) # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) # This method is paged; wrap the response in a pager, which provides # an `__aiter__` convenience method. response = pagers.ListAnnotationsAsyncPager( - method=rpc, - request=request, - response=response, - metadata=metadata, + method=rpc, request=request, response=response, metadata=metadata, ) # Done; return the response. 
diff --git a/google/cloud/aiplatform_v1beta1/services/dataset_service/client.py b/google/cloud/aiplatform_v1beta1/services/dataset_service/client.py index b97aa5385c..1e63153291 100644 --- a/google/cloud/aiplatform_v1beta1/services/dataset_service/client.py +++ b/google/cloud/aiplatform_v1beta1/services/dataset_service/client.py @@ -66,10 +66,7 @@ class DatasetServiceClientMeta(type): _transport_registry["grpc"] = DatasetServiceGrpcTransport _transport_registry["grpc_asyncio"] = DatasetServiceGrpcAsyncIOTransport - def get_transport_class( - cls, - label: str = None, - ) -> Type[DatasetServiceTransport]: + def get_transport_class(cls, label: str = None,) -> Type[DatasetServiceTransport]: """Return an appropriate transport class. Args: @@ -156,11 +153,7 @@ def transport(self) -> DatasetServiceTransport: @staticmethod def annotation_path( - project: str, - location: str, - dataset: str, - data_item: str, - annotation: str, + project: str, location: str, dataset: str, data_item: str, annotation: str, ) -> str: """Return a fully-qualified annotation string.""" return "projects/{project}/locations/{location}/datasets/{dataset}/dataItems/{data_item}/annotations/{annotation}".format( @@ -182,10 +175,7 @@ def parse_annotation_path(path: str) -> Dict[str, str]: @staticmethod def annotation_spec_path( - project: str, - location: str, - dataset: str, - annotation_spec: str, + project: str, location: str, dataset: str, annotation_spec: str, ) -> str: """Return a fully-qualified annotation_spec string.""" return "projects/{project}/locations/{location}/datasets/{dataset}/annotationSpecs/{annotation_spec}".format( @@ -206,17 +196,11 @@ def parse_annotation_spec_path(path: str) -> Dict[str, str]: @staticmethod def data_item_path( - project: str, - location: str, - dataset: str, - data_item: str, + project: str, location: str, dataset: str, data_item: str, ) -> str: """Return a fully-qualified data_item string.""" return "projects/{project}/locations/{location}/datasets/{dataset}/dataItems/{data_item}".format( - project=project, - location=location, - dataset=dataset, - data_item=data_item, + project=project, location=location, dataset=dataset, data_item=data_item, ) @staticmethod @@ -229,16 +213,10 @@ def parse_data_item_path(path: str) -> Dict[str, str]: return m.groupdict() if m else {} @staticmethod - def dataset_path( - project: str, - location: str, - dataset: str, - ) -> str: + def dataset_path(project: str, location: str, dataset: str,) -> str: """Return a fully-qualified dataset string.""" return "projects/{project}/locations/{location}/datasets/{dataset}".format( - project=project, - location=location, - dataset=dataset, + project=project, location=location, dataset=dataset, ) @staticmethod @@ -251,9 +229,7 @@ def parse_dataset_path(path: str) -> Dict[str, str]: return m.groupdict() if m else {} @staticmethod - def common_billing_account_path( - billing_account: str, - ) -> str: + def common_billing_account_path(billing_account: str,) -> str: """Return a fully-qualified billing_account string.""" return "billingAccounts/{billing_account}".format( billing_account=billing_account, @@ -266,13 +242,9 @@ def parse_common_billing_account_path(path: str) -> Dict[str, str]: return m.groupdict() if m else {} @staticmethod - def common_folder_path( - folder: str, - ) -> str: + def common_folder_path(folder: str,) -> str: """Return a fully-qualified folder string.""" - return "folders/{folder}".format( - folder=folder, - ) + return "folders/{folder}".format(folder=folder,) @staticmethod def 
parse_common_folder_path(path: str) -> Dict[str, str]: @@ -281,13 +253,9 @@ def parse_common_folder_path(path: str) -> Dict[str, str]: return m.groupdict() if m else {} @staticmethod - def common_organization_path( - organization: str, - ) -> str: + def common_organization_path(organization: str,) -> str: """Return a fully-qualified organization string.""" - return "organizations/{organization}".format( - organization=organization, - ) + return "organizations/{organization}".format(organization=organization,) @staticmethod def parse_common_organization_path(path: str) -> Dict[str, str]: @@ -296,13 +264,9 @@ def parse_common_organization_path(path: str) -> Dict[str, str]: return m.groupdict() if m else {} @staticmethod - def common_project_path( - project: str, - ) -> str: + def common_project_path(project: str,) -> str: """Return a fully-qualified project string.""" - return "projects/{project}".format( - project=project, - ) + return "projects/{project}".format(project=project,) @staticmethod def parse_common_project_path(path: str) -> Dict[str, str]: @@ -311,14 +275,10 @@ def parse_common_project_path(path: str) -> Dict[str, str]: return m.groupdict() if m else {} @staticmethod - def common_location_path( - project: str, - location: str, - ) -> str: + def common_location_path(project: str, location: str,) -> str: """Return a fully-qualified location string.""" return "projects/{project}/locations/{location}".format( - project=project, - location=location, + project=project, location=location, ) @staticmethod @@ -524,12 +484,7 @@ def create_dataset( ) # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,) # Wrap the response in an operation future. response = ga_operation.from_gapic( @@ -610,12 +565,7 @@ def get_dataset( ) # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,) # Done; return the response. return response @@ -706,12 +656,7 @@ def update_dataset( ) # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,) # Done; return the response. return response @@ -787,20 +732,12 @@ def list_datasets( ) # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,) # This method is paged; wrap the response in a pager, which provides # an `__iter__` convenience method. response = pagers.ListDatasetsPager( - method=rpc, - request=request, - response=response, - metadata=metadata, + method=rpc, request=request, response=response, metadata=metadata, ) # Done; return the response. @@ -890,12 +827,7 @@ def delete_dataset( ) # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,) # Wrap the response in an operation future. response = ga_operation.from_gapic( @@ -991,12 +923,7 @@ def import_data( ) # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,) # Wrap the response in an operation future. 
response = ga_operation.from_gapic( @@ -1090,12 +1017,7 @@ def export_data( ) # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,) # Wrap the response in an operation future. response = ga_operation.from_gapic( @@ -1180,20 +1102,12 @@ def list_data_items( ) # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,) # This method is paged; wrap the response in a pager, which provides # an `__iter__` convenience method. response = pagers.ListDataItemsPager( - method=rpc, - request=request, - response=response, - metadata=metadata, + method=rpc, request=request, response=response, metadata=metadata, ) # Done; return the response. @@ -1269,12 +1183,7 @@ def get_annotation_spec( ) # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,) # Done; return the response. return response @@ -1352,20 +1261,12 @@ def list_annotations( ) # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,) # This method is paged; wrap the response in a pager, which provides # an `__iter__` convenience method. response = pagers.ListAnnotationsPager( - method=rpc, - request=request, - response=response, - metadata=metadata, + method=rpc, request=request, response=response, metadata=metadata, ) # Done; return the response. diff --git a/google/cloud/aiplatform_v1beta1/services/dataset_service/transports/base.py b/google/cloud/aiplatform_v1beta1/services/dataset_service/transports/base.py index 6d7a5dc0c3..583e9864cd 100644 --- a/google/cloud/aiplatform_v1beta1/services/dataset_service/transports/base.py +++ b/google/cloud/aiplatform_v1beta1/services/dataset_service/transports/base.py @@ -112,54 +112,34 @@ def _prep_wrapped_messages(self, client_info): # Precompute the wrapped methods. 
self._wrapped_methods = { self.create_dataset: gapic_v1.method.wrap_method( - self.create_dataset, - default_timeout=5.0, - client_info=client_info, + self.create_dataset, default_timeout=5.0, client_info=client_info, ), self.get_dataset: gapic_v1.method.wrap_method( - self.get_dataset, - default_timeout=5.0, - client_info=client_info, + self.get_dataset, default_timeout=5.0, client_info=client_info, ), self.update_dataset: gapic_v1.method.wrap_method( - self.update_dataset, - default_timeout=5.0, - client_info=client_info, + self.update_dataset, default_timeout=5.0, client_info=client_info, ), self.list_datasets: gapic_v1.method.wrap_method( - self.list_datasets, - default_timeout=5.0, - client_info=client_info, + self.list_datasets, default_timeout=5.0, client_info=client_info, ), self.delete_dataset: gapic_v1.method.wrap_method( - self.delete_dataset, - default_timeout=5.0, - client_info=client_info, + self.delete_dataset, default_timeout=5.0, client_info=client_info, ), self.import_data: gapic_v1.method.wrap_method( - self.import_data, - default_timeout=5.0, - client_info=client_info, + self.import_data, default_timeout=5.0, client_info=client_info, ), self.export_data: gapic_v1.method.wrap_method( - self.export_data, - default_timeout=5.0, - client_info=client_info, + self.export_data, default_timeout=5.0, client_info=client_info, ), self.list_data_items: gapic_v1.method.wrap_method( - self.list_data_items, - default_timeout=5.0, - client_info=client_info, + self.list_data_items, default_timeout=5.0, client_info=client_info, ), self.get_annotation_spec: gapic_v1.method.wrap_method( - self.get_annotation_spec, - default_timeout=5.0, - client_info=client_info, + self.get_annotation_spec, default_timeout=5.0, client_info=client_info, ), self.list_annotations: gapic_v1.method.wrap_method( - self.list_annotations, - default_timeout=5.0, - client_info=client_info, + self.list_annotations, default_timeout=5.0, client_info=client_info, ), } diff --git a/google/cloud/aiplatform_v1beta1/services/endpoint_service/async_client.py b/google/cloud/aiplatform_v1beta1/services/endpoint_service/async_client.py index 3afd01ea0c..9c6af3bd16 100644 --- a/google/cloud/aiplatform_v1beta1/services/endpoint_service/async_client.py +++ b/google/cloud/aiplatform_v1beta1/services/endpoint_service/async_client.py @@ -230,12 +230,7 @@ async def create_endpoint( ) # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) # Wrap the response in an operation future. response = operation_async.from_gapic( @@ -316,12 +311,7 @@ async def get_endpoint( ) # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) # Done; return the response. return response @@ -397,20 +387,12 @@ async def list_endpoints( ) # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) # This method is paged; wrap the response in a pager, which provides # an `__aiter__` convenience method. response = pagers.ListEndpointsAsyncPager( - method=rpc, - request=request, - response=response, - metadata=metadata, + method=rpc, request=request, response=response, metadata=metadata, ) # Done; return the response. 
@@ -495,12 +477,7 @@ async def update_endpoint( ) # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) # Done; return the response. return response @@ -588,12 +565,7 @@ async def delete_endpoint( ) # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) # Wrap the response in an operation future. response = operation_async.from_gapic( @@ -717,12 +689,7 @@ async def deploy_model( ) # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) # Wrap the response in an operation future. response = operation_async.from_gapic( @@ -837,12 +804,7 @@ async def undeploy_model( ) # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) # Wrap the response in an operation future. response = operation_async.from_gapic( diff --git a/google/cloud/aiplatform_v1beta1/services/endpoint_service/client.py b/google/cloud/aiplatform_v1beta1/services/endpoint_service/client.py index 28a8f6ab78..5ea003b827 100644 --- a/google/cloud/aiplatform_v1beta1/services/endpoint_service/client.py +++ b/google/cloud/aiplatform_v1beta1/services/endpoint_service/client.py @@ -62,10 +62,7 @@ class EndpointServiceClientMeta(type): _transport_registry["grpc"] = EndpointServiceGrpcTransport _transport_registry["grpc_asyncio"] = EndpointServiceGrpcAsyncIOTransport - def get_transport_class( - cls, - label: str = None, - ) -> Type[EndpointServiceTransport]: + def get_transport_class(cls, label: str = None,) -> Type[EndpointServiceTransport]: """Return an appropriate transport class. 
Args: @@ -151,16 +148,10 @@ def transport(self) -> EndpointServiceTransport: return self._transport @staticmethod - def endpoint_path( - project: str, - location: str, - endpoint: str, - ) -> str: + def endpoint_path(project: str, location: str, endpoint: str,) -> str: """Return a fully-qualified endpoint string.""" return "projects/{project}/locations/{location}/endpoints/{endpoint}".format( - project=project, - location=location, - endpoint=endpoint, + project=project, location=location, endpoint=endpoint, ) @staticmethod @@ -173,16 +164,10 @@ def parse_endpoint_path(path: str) -> Dict[str, str]: return m.groupdict() if m else {} @staticmethod - def model_path( - project: str, - location: str, - model: str, - ) -> str: + def model_path(project: str, location: str, model: str,) -> str: """Return a fully-qualified model string.""" return "projects/{project}/locations/{location}/models/{model}".format( - project=project, - location=location, - model=model, + project=project, location=location, model=model, ) @staticmethod @@ -195,9 +180,7 @@ def parse_model_path(path: str) -> Dict[str, str]: return m.groupdict() if m else {} @staticmethod - def common_billing_account_path( - billing_account: str, - ) -> str: + def common_billing_account_path(billing_account: str,) -> str: """Return a fully-qualified billing_account string.""" return "billingAccounts/{billing_account}".format( billing_account=billing_account, @@ -210,13 +193,9 @@ def parse_common_billing_account_path(path: str) -> Dict[str, str]: return m.groupdict() if m else {} @staticmethod - def common_folder_path( - folder: str, - ) -> str: + def common_folder_path(folder: str,) -> str: """Return a fully-qualified folder string.""" - return "folders/{folder}".format( - folder=folder, - ) + return "folders/{folder}".format(folder=folder,) @staticmethod def parse_common_folder_path(path: str) -> Dict[str, str]: @@ -225,13 +204,9 @@ def parse_common_folder_path(path: str) -> Dict[str, str]: return m.groupdict() if m else {} @staticmethod - def common_organization_path( - organization: str, - ) -> str: + def common_organization_path(organization: str,) -> str: """Return a fully-qualified organization string.""" - return "organizations/{organization}".format( - organization=organization, - ) + return "organizations/{organization}".format(organization=organization,) @staticmethod def parse_common_organization_path(path: str) -> Dict[str, str]: @@ -240,13 +215,9 @@ def parse_common_organization_path(path: str) -> Dict[str, str]: return m.groupdict() if m else {} @staticmethod - def common_project_path( - project: str, - ) -> str: + def common_project_path(project: str,) -> str: """Return a fully-qualified project string.""" - return "projects/{project}".format( - project=project, - ) + return "projects/{project}".format(project=project,) @staticmethod def parse_common_project_path(path: str) -> Dict[str, str]: @@ -255,14 +226,10 @@ def parse_common_project_path(path: str) -> Dict[str, str]: return m.groupdict() if m else {} @staticmethod - def common_location_path( - project: str, - location: str, - ) -> str: + def common_location_path(project: str, location: str,) -> str: """Return a fully-qualified location string.""" return "projects/{project}/locations/{location}".format( - project=project, - location=location, + project=project, location=location, ) @staticmethod @@ -469,12 +436,7 @@ def create_endpoint( ) # Send the request. 
- response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,) # Wrap the response in an operation future. response = ga_operation.from_gapic( @@ -556,12 +518,7 @@ def get_endpoint( ) # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,) # Done; return the response. return response @@ -638,20 +595,12 @@ def list_endpoints( ) # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,) # This method is paged; wrap the response in a pager, which provides # an `__iter__` convenience method. response = pagers.ListEndpointsPager( - method=rpc, - request=request, - response=response, - metadata=metadata, + method=rpc, request=request, response=response, metadata=metadata, ) # Done; return the response. @@ -737,12 +686,7 @@ def update_endpoint( ) # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,) # Done; return the response. return response @@ -831,12 +775,7 @@ def delete_endpoint( ) # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,) # Wrap the response in an operation future. response = ga_operation.from_gapic( @@ -961,12 +900,7 @@ def deploy_model( ) # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,) # Wrap the response in an operation future. response = ga_operation.from_gapic( @@ -1082,12 +1016,7 @@ def undeploy_model( ) # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,) # Wrap the response in an operation future. response = ga_operation.from_gapic( diff --git a/google/cloud/aiplatform_v1beta1/services/endpoint_service/transports/base.py b/google/cloud/aiplatform_v1beta1/services/endpoint_service/transports/base.py index 8608b11624..88b2b17c57 100644 --- a/google/cloud/aiplatform_v1beta1/services/endpoint_service/transports/base.py +++ b/google/cloud/aiplatform_v1beta1/services/endpoint_service/transports/base.py @@ -111,39 +111,25 @@ def _prep_wrapped_messages(self, client_info): # Precompute the wrapped methods. 
self._wrapped_methods = { self.create_endpoint: gapic_v1.method.wrap_method( - self.create_endpoint, - default_timeout=5.0, - client_info=client_info, + self.create_endpoint, default_timeout=5.0, client_info=client_info, ), self.get_endpoint: gapic_v1.method.wrap_method( - self.get_endpoint, - default_timeout=5.0, - client_info=client_info, + self.get_endpoint, default_timeout=5.0, client_info=client_info, ), self.list_endpoints: gapic_v1.method.wrap_method( - self.list_endpoints, - default_timeout=5.0, - client_info=client_info, + self.list_endpoints, default_timeout=5.0, client_info=client_info, ), self.update_endpoint: gapic_v1.method.wrap_method( - self.update_endpoint, - default_timeout=5.0, - client_info=client_info, + self.update_endpoint, default_timeout=5.0, client_info=client_info, ), self.delete_endpoint: gapic_v1.method.wrap_method( - self.delete_endpoint, - default_timeout=5.0, - client_info=client_info, + self.delete_endpoint, default_timeout=5.0, client_info=client_info, ), self.deploy_model: gapic_v1.method.wrap_method( - self.deploy_model, - default_timeout=5.0, - client_info=client_info, + self.deploy_model, default_timeout=5.0, client_info=client_info, ), self.undeploy_model: gapic_v1.method.wrap_method( - self.undeploy_model, - default_timeout=5.0, - client_info=client_info, + self.undeploy_model, default_timeout=5.0, client_info=client_info, ), } diff --git a/google/cloud/aiplatform_v1beta1/services/job_service/async_client.py b/google/cloud/aiplatform_v1beta1/services/job_service/async_client.py index 258cd49a51..2a24748d11 100644 --- a/google/cloud/aiplatform_v1beta1/services/job_service/async_client.py +++ b/google/cloud/aiplatform_v1beta1/services/job_service/async_client.py @@ -263,12 +263,7 @@ async def create_custom_job( ) # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) # Done; return the response. return response @@ -346,12 +341,7 @@ async def get_custom_job( ) # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) # Done; return the response. return response @@ -427,20 +417,12 @@ async def list_custom_jobs( ) # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) # This method is paged; wrap the response in a pager, which provides # an `__aiter__` convenience method. response = pagers.ListCustomJobsAsyncPager( - method=rpc, - request=request, - response=response, - metadata=metadata, + method=rpc, request=request, response=response, metadata=metadata, ) # Done; return the response. @@ -529,12 +511,7 @@ async def delete_custom_job( ) # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) # Wrap the response in an operation future. response = operation_async.from_gapic( @@ -621,10 +598,7 @@ async def cancel_custom_job( # Send the request. await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, + request, retry=retry, timeout=timeout, metadata=metadata, ) async def create_data_labeling_job( @@ -704,12 +678,7 @@ async def create_data_labeling_job( ) # Send the request. 
- response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) # Done; return the response. return response @@ -783,12 +752,7 @@ async def get_data_labeling_job( ) # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) # Done; return the response. return response @@ -863,20 +827,12 @@ async def list_data_labeling_jobs( ) # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) # This method is paged; wrap the response in a pager, which provides # an `__aiter__` convenience method. response = pagers.ListDataLabelingJobsAsyncPager( - method=rpc, - request=request, - response=response, - metadata=metadata, + method=rpc, request=request, response=response, metadata=metadata, ) # Done; return the response. @@ -966,12 +922,7 @@ async def delete_data_labeling_job( ) # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) # Wrap the response in an operation future. response = operation_async.from_gapic( @@ -1048,10 +999,7 @@ async def cancel_data_labeling_job( # Send the request. await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, + request, retry=retry, timeout=timeout, metadata=metadata, ) async def create_hyperparameter_tuning_job( @@ -1133,12 +1081,7 @@ async def create_hyperparameter_tuning_job( ) # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) # Done; return the response. return response @@ -1214,12 +1157,7 @@ async def get_hyperparameter_tuning_job( ) # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) # Done; return the response. return response @@ -1295,20 +1233,12 @@ async def list_hyperparameter_tuning_jobs( ) # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) # This method is paged; wrap the response in a pager, which provides # an `__aiter__` convenience method. response = pagers.ListHyperparameterTuningJobsAsyncPager( - method=rpc, - request=request, - response=response, - metadata=metadata, + method=rpc, request=request, response=response, metadata=metadata, ) # Done; return the response. @@ -1398,12 +1328,7 @@ async def delete_hyperparameter_tuning_job( ) # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) # Wrap the response in an operation future. response = operation_async.from_gapic( @@ -1493,10 +1418,7 @@ async def cancel_hyperparameter_tuning_job( # Send the request. 
await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, + request, retry=retry, timeout=timeout, metadata=metadata, ) async def create_batch_prediction_job( @@ -1582,12 +1504,7 @@ async def create_batch_prediction_job( ) # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) # Done; return the response. return response @@ -1666,12 +1583,7 @@ async def get_batch_prediction_job( ) # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) # Done; return the response. return response @@ -1747,20 +1659,12 @@ async def list_batch_prediction_jobs( ) # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) # This method is paged; wrap the response in a pager, which provides # an `__aiter__` convenience method. response = pagers.ListBatchPredictionJobsAsyncPager( - method=rpc, - request=request, - response=response, - metadata=metadata, + method=rpc, request=request, response=response, metadata=metadata, ) # Done; return the response. @@ -1851,12 +1755,7 @@ async def delete_batch_prediction_job( ) # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) # Wrap the response in an operation future. response = operation_async.from_gapic( @@ -1944,10 +1843,7 @@ async def cancel_batch_prediction_job( # Send the request. await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, + request, retry=retry, timeout=timeout, metadata=metadata, ) diff --git a/google/cloud/aiplatform_v1beta1/services/job_service/client.py b/google/cloud/aiplatform_v1beta1/services/job_service/client.py index 9e73a6bf73..a1eb7c38ce 100644 --- a/google/cloud/aiplatform_v1beta1/services/job_service/client.py +++ b/google/cloud/aiplatform_v1beta1/services/job_service/client.py @@ -80,10 +80,7 @@ class JobServiceClientMeta(type): _transport_registry["grpc"] = JobServiceGrpcTransport _transport_registry["grpc_asyncio"] = JobServiceGrpcAsyncIOTransport - def get_transport_class( - cls, - label: str = None, - ) -> Type[JobServiceTransport]: + def get_transport_class(cls, label: str = None,) -> Type[JobServiceTransport]: """Return an appropriate transport class. 
Args: @@ -170,9 +167,7 @@ def transport(self) -> JobServiceTransport: @staticmethod def batch_prediction_job_path( - project: str, - location: str, - batch_prediction_job: str, + project: str, location: str, batch_prediction_job: str, ) -> str: """Return a fully-qualified batch_prediction_job string.""" return "projects/{project}/locations/{location}/batchPredictionJobs/{batch_prediction_job}".format( @@ -191,16 +186,10 @@ def parse_batch_prediction_job_path(path: str) -> Dict[str, str]: return m.groupdict() if m else {} @staticmethod - def custom_job_path( - project: str, - location: str, - custom_job: str, - ) -> str: + def custom_job_path(project: str, location: str, custom_job: str,) -> str: """Return a fully-qualified custom_job string.""" return "projects/{project}/locations/{location}/customJobs/{custom_job}".format( - project=project, - location=location, - custom_job=custom_job, + project=project, location=location, custom_job=custom_job, ) @staticmethod @@ -214,15 +203,11 @@ def parse_custom_job_path(path: str) -> Dict[str, str]: @staticmethod def data_labeling_job_path( - project: str, - location: str, - data_labeling_job: str, + project: str, location: str, data_labeling_job: str, ) -> str: """Return a fully-qualified data_labeling_job string.""" return "projects/{project}/locations/{location}/dataLabelingJobs/{data_labeling_job}".format( - project=project, - location=location, - data_labeling_job=data_labeling_job, + project=project, location=location, data_labeling_job=data_labeling_job, ) @staticmethod @@ -235,16 +220,10 @@ def parse_data_labeling_job_path(path: str) -> Dict[str, str]: return m.groupdict() if m else {} @staticmethod - def dataset_path( - project: str, - location: str, - dataset: str, - ) -> str: + def dataset_path(project: str, location: str, dataset: str,) -> str: """Return a fully-qualified dataset string.""" return "projects/{project}/locations/{location}/datasets/{dataset}".format( - project=project, - location=location, - dataset=dataset, + project=project, location=location, dataset=dataset, ) @staticmethod @@ -258,9 +237,7 @@ def parse_dataset_path(path: str) -> Dict[str, str]: @staticmethod def hyperparameter_tuning_job_path( - project: str, - location: str, - hyperparameter_tuning_job: str, + project: str, location: str, hyperparameter_tuning_job: str, ) -> str: """Return a fully-qualified hyperparameter_tuning_job string.""" return "projects/{project}/locations/{location}/hyperparameterTuningJobs/{hyperparameter_tuning_job}".format( @@ -279,16 +256,10 @@ def parse_hyperparameter_tuning_job_path(path: str) -> Dict[str, str]: return m.groupdict() if m else {} @staticmethod - def model_path( - project: str, - location: str, - model: str, - ) -> str: + def model_path(project: str, location: str, model: str,) -> str: """Return a fully-qualified model string.""" return "projects/{project}/locations/{location}/models/{model}".format( - project=project, - location=location, - model=model, + project=project, location=location, model=model, ) @staticmethod @@ -301,9 +272,7 @@ def parse_model_path(path: str) -> Dict[str, str]: return m.groupdict() if m else {} @staticmethod - def common_billing_account_path( - billing_account: str, - ) -> str: + def common_billing_account_path(billing_account: str,) -> str: """Return a fully-qualified billing_account string.""" return "billingAccounts/{billing_account}".format( billing_account=billing_account, @@ -316,13 +285,9 @@ def parse_common_billing_account_path(path: str) -> Dict[str, str]: return m.groupdict() if m 
else {} @staticmethod - def common_folder_path( - folder: str, - ) -> str: + def common_folder_path(folder: str,) -> str: """Return a fully-qualified folder string.""" - return "folders/{folder}".format( - folder=folder, - ) + return "folders/{folder}".format(folder=folder,) @staticmethod def parse_common_folder_path(path: str) -> Dict[str, str]: @@ -331,13 +296,9 @@ def parse_common_folder_path(path: str) -> Dict[str, str]: return m.groupdict() if m else {} @staticmethod - def common_organization_path( - organization: str, - ) -> str: + def common_organization_path(organization: str,) -> str: """Return a fully-qualified organization string.""" - return "organizations/{organization}".format( - organization=organization, - ) + return "organizations/{organization}".format(organization=organization,) @staticmethod def parse_common_organization_path(path: str) -> Dict[str, str]: @@ -346,13 +307,9 @@ def parse_common_organization_path(path: str) -> Dict[str, str]: return m.groupdict() if m else {} @staticmethod - def common_project_path( - project: str, - ) -> str: + def common_project_path(project: str,) -> str: """Return a fully-qualified project string.""" - return "projects/{project}".format( - project=project, - ) + return "projects/{project}".format(project=project,) @staticmethod def parse_common_project_path(path: str) -> Dict[str, str]: @@ -361,14 +318,10 @@ def parse_common_project_path(path: str) -> Dict[str, str]: return m.groupdict() if m else {} @staticmethod - def common_location_path( - project: str, - location: str, - ) -> str: + def common_location_path(project: str, location: str,) -> str: """Return a fully-qualified location string.""" return "projects/{project}/locations/{location}".format( - project=project, - location=location, + project=project, location=location, ) @staticmethod @@ -578,12 +531,7 @@ def create_custom_job( ) # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,) # Done; return the response. return response @@ -662,12 +610,7 @@ def get_custom_job( ) # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,) # Done; return the response. return response @@ -744,20 +687,12 @@ def list_custom_jobs( ) # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,) # This method is paged; wrap the response in a pager, which provides # an `__iter__` convenience method. response = pagers.ListCustomJobsPager( - method=rpc, - request=request, - response=response, - metadata=metadata, + method=rpc, request=request, response=response, metadata=metadata, ) # Done; return the response. @@ -847,12 +782,7 @@ def delete_custom_job( ) # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,) # Wrap the response in an operation future. response = ga_operation.from_gapic( @@ -940,10 +870,7 @@ def cancel_custom_job( # Send the request. rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, + request, retry=retry, timeout=timeout, metadata=metadata, ) def create_data_labeling_job( @@ -1024,12 +951,7 @@ def create_data_labeling_job( ) # Send the request. 
- response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,) # Done; return the response. return response @@ -1104,12 +1026,7 @@ def get_data_labeling_job( ) # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,) # Done; return the response. return response @@ -1185,20 +1102,12 @@ def list_data_labeling_jobs( ) # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,) # This method is paged; wrap the response in a pager, which provides # an `__iter__` convenience method. response = pagers.ListDataLabelingJobsPager( - method=rpc, - request=request, - response=response, - metadata=metadata, + method=rpc, request=request, response=response, metadata=metadata, ) # Done; return the response. @@ -1289,12 +1198,7 @@ def delete_data_labeling_job( ) # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,) # Wrap the response in an operation future. response = ga_operation.from_gapic( @@ -1372,10 +1276,7 @@ def cancel_data_labeling_job( # Send the request. rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, + request, retry=retry, timeout=timeout, metadata=metadata, ) def create_hyperparameter_tuning_job( @@ -1460,12 +1361,7 @@ def create_hyperparameter_tuning_job( ) # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,) # Done; return the response. return response @@ -1544,12 +1440,7 @@ def get_hyperparameter_tuning_job( ) # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,) # Done; return the response. return response @@ -1628,20 +1519,12 @@ def list_hyperparameter_tuning_jobs( ) # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,) # This method is paged; wrap the response in a pager, which provides # an `__iter__` convenience method. response = pagers.ListHyperparameterTuningJobsPager( - method=rpc, - request=request, - response=response, - metadata=metadata, + method=rpc, request=request, response=response, metadata=metadata, ) # Done; return the response. @@ -1734,12 +1617,7 @@ def delete_hyperparameter_tuning_job( ) # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,) # Wrap the response in an operation future. response = ga_operation.from_gapic( @@ -1832,10 +1710,7 @@ def cancel_hyperparameter_tuning_job( # Send the request. rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, + request, retry=retry, timeout=timeout, metadata=metadata, ) def create_batch_prediction_job( @@ -1924,12 +1799,7 @@ def create_batch_prediction_job( ) # Send the request. 
-        response = rpc(
-            request,
-            retry=retry,
-            timeout=timeout,
-            metadata=metadata,
-        )
+        response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,)
 
         # Done; return the response.
         return response
 
@@ -2009,12 +1879,7 @@ def get_batch_prediction_job(
         )
 
         # Send the request.
-        response = rpc(
-            request,
-            retry=retry,
-            timeout=timeout,
-            metadata=metadata,
-        )
+        response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,)
 
         # Done; return the response.
         return response
 
@@ -2093,20 +1958,12 @@ def list_batch_prediction_jobs(
         )
 
         # Send the request.
-        response = rpc(
-            request,
-            retry=retry,
-            timeout=timeout,
-            metadata=metadata,
-        )
+        response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,)
 
         # This method is paged; wrap the response in a pager, which provides
         # an `__iter__` convenience method.
         response = pagers.ListBatchPredictionJobsPager(
-            method=rpc,
-            request=request,
-            response=response,
-            metadata=metadata,
+            method=rpc, request=request, response=response, metadata=metadata,
         )
 
         # Done; return the response.
@@ -2200,12 +2057,7 @@ def delete_batch_prediction_job(
         )
 
         # Send the request.
-        response = rpc(
-            request,
-            retry=retry,
-            timeout=timeout,
-            metadata=metadata,
-        )
+        response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,)
 
         # Wrap the response in an operation future.
         response = ga_operation.from_gapic(
@@ -2296,10 +2148,7 @@ def cancel_batch_prediction_job(
 
         # Send the request.
         rpc(
-            request,
-            retry=retry,
-            timeout=timeout,
-            metadata=metadata,
+            request, retry=retry, timeout=timeout, metadata=metadata,
         )
diff --git a/google/cloud/aiplatform_v1beta1/services/job_service/transports/base.py b/google/cloud/aiplatform_v1beta1/services/job_service/transports/base.py
index 6ac6330d01..abedda51f9 100644
--- a/google/cloud/aiplatform_v1beta1/services/job_service/transports/base.py
+++ b/google/cloud/aiplatform_v1beta1/services/job_service/transports/base.py
@@ -124,29 +124,19 @@ def _prep_wrapped_messages(self, client_info):
         # Precompute the wrapped methods.
         self._wrapped_methods = {
             self.create_custom_job: gapic_v1.method.wrap_method(
-                self.create_custom_job,
-                default_timeout=5.0,
-                client_info=client_info,
+                self.create_custom_job, default_timeout=5.0, client_info=client_info,
             ),
             self.get_custom_job: gapic_v1.method.wrap_method(
-                self.get_custom_job,
-                default_timeout=5.0,
-                client_info=client_info,
+                self.get_custom_job, default_timeout=5.0, client_info=client_info,
             ),
             self.list_custom_jobs: gapic_v1.method.wrap_method(
-                self.list_custom_jobs,
-                default_timeout=5.0,
-                client_info=client_info,
+                self.list_custom_jobs, default_timeout=5.0, client_info=client_info,
            ),
             self.delete_custom_job: gapic_v1.method.wrap_method(
-                self.delete_custom_job,
-                default_timeout=5.0,
-                client_info=client_info,
+                self.delete_custom_job, default_timeout=5.0, client_info=client_info,
             ),
             self.cancel_custom_job: gapic_v1.method.wrap_method(
-                self.cancel_custom_job,
-                default_timeout=5.0,
-                client_info=client_info,
+                self.cancel_custom_job, default_timeout=5.0, client_info=client_info,
             ),
             self.create_data_labeling_job: gapic_v1.method.wrap_method(
                 self.create_data_labeling_job,
diff --git a/google/cloud/aiplatform_v1beta1/services/migration_service/async_client.py b/google/cloud/aiplatform_v1beta1/services/migration_service/async_client.py
index 0f2348ac38..af13c4d4fb 100644
--- a/google/cloud/aiplatform_v1beta1/services/migration_service/async_client.py
+++ b/google/cloud/aiplatform_v1beta1/services/migration_service/async_client.py
@@ -236,20 +236,12 @@ async def search_migratable_resources(
         )
 
         # Send the request.
-        response = await rpc(
-            request,
-            retry=retry,
-            timeout=timeout,
-            metadata=metadata,
-        )
+        response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,)
 
         # This method is paged; wrap the response in a pager, which provides
         # an `__aiter__` convenience method.
         response = pagers.SearchMigratableResourcesAsyncPager(
-            method=rpc,
-            request=request,
-            response=response,
-            metadata=metadata,
+            method=rpc, request=request, response=response, metadata=metadata,
         )
 
         # Done; return the response.
@@ -344,12 +336,7 @@ async def batch_migrate_resources(
         )
 
         # Send the request.
-        response = await rpc(
-            request,
-            retry=retry,
-            timeout=timeout,
-            metadata=metadata,
-        )
+        response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,)
 
         # Wrap the response in an operation future.
         response = operation_async.from_gapic(
diff --git a/google/cloud/aiplatform_v1beta1/services/migration_service/client.py b/google/cloud/aiplatform_v1beta1/services/migration_service/client.py
index 116a987f86..bf1f8e5c6b 100644
--- a/google/cloud/aiplatform_v1beta1/services/migration_service/client.py
+++ b/google/cloud/aiplatform_v1beta1/services/migration_service/client.py
@@ -57,10 +57,7 @@ class MigrationServiceClientMeta(type):
     _transport_registry["grpc"] = MigrationServiceGrpcTransport
     _transport_registry["grpc_asyncio"] = MigrationServiceGrpcAsyncIOTransport
 
-    def get_transport_class(
-        cls,
-        label: str = None,
-    ) -> Type[MigrationServiceTransport]:
+    def get_transport_class(cls, label: str = None,) -> Type[MigrationServiceTransport]:
         """Return an appropriate transport class.
 
         Args:
@@ -150,15 +147,11 @@ def transport(self) -> MigrationServiceTransport:
 
     @staticmethod
     def annotated_dataset_path(
-        project: str,
-        dataset: str,
-        annotated_dataset: str,
+        project: str, dataset: str, annotated_dataset: str,
     ) -> str:
         """Return a fully-qualified annotated_dataset string."""
         return "projects/{project}/datasets/{dataset}/annotatedDatasets/{annotated_dataset}".format(
-            project=project,
-            dataset=dataset,
-            annotated_dataset=annotated_dataset,
+            project=project, dataset=dataset, annotated_dataset=annotated_dataset,
         )
 
     @staticmethod
@@ -171,16 +164,10 @@ def parse_annotated_dataset_path(path: str) -> Dict[str, str]:
         return m.groupdict() if m else {}
 
     @staticmethod
-    def dataset_path(
-        project: str,
-        location: str,
-        dataset: str,
-    ) -> str:
+    def dataset_path(project: str, location: str, dataset: str,) -> str:
         """Return a fully-qualified dataset string."""
         return "projects/{project}/locations/{location}/datasets/{dataset}".format(
-            project=project,
-            location=location,
-            dataset=dataset,
+            project=project, location=location, dataset=dataset,
         )
 
     @staticmethod
@@ -193,14 +180,10 @@ def parse_dataset_path(path: str) -> Dict[str, str]:
         return m.groupdict() if m else {}
 
     @staticmethod
-    def dataset_path(
-        project: str,
-        dataset: str,
-    ) -> str:
+    def dataset_path(project: str, dataset: str,) -> str:
         """Return a fully-qualified dataset string."""
         return "projects/{project}/datasets/{dataset}".format(
-            project=project,
-            dataset=dataset,
+            project=project, dataset=dataset,
         )
 
     @staticmethod
@@ -210,16 +193,10 @@ def parse_dataset_path(path: str) -> Dict[str, str]:
         return m.groupdict() if m else {}
 
     @staticmethod
-    def dataset_path(
-        project: str,
-        location: str,
-        dataset: str,
-    ) -> str:
+    def dataset_path(project: str, location: str, dataset: str,) -> str:
         """Return a fully-qualified dataset string."""
         return "projects/{project}/locations/{location}/datasets/{dataset}".format(
-            project=project,
-            location=location,
-            dataset=dataset,
+            project=project, location=location, dataset=dataset,
         )
 
     @staticmethod
@@ -232,16 +209,10 @@ def parse_dataset_path(path: str) -> Dict[str, str]:
         return m.groupdict() if m else {}
 
     @staticmethod
-    def model_path(
-        project: str,
-        location: str,
-        model: str,
-    ) -> str:
+    def model_path(project: str, location: str, model: str,) -> str:
         """Return a fully-qualified model string."""
         return "projects/{project}/locations/{location}/models/{model}".format(
-            project=project,
-            location=location,
-            model=model,
+            project=project, location=location, model=model,
         )
 
     @staticmethod
@@ -254,16 +225,10 @@ def parse_model_path(path: str) -> Dict[str, str]:
         return m.groupdict() if m else {}
 
     @staticmethod
-    def model_path(
-        project: str,
-        location: str,
-        model: str,
-    ) -> str:
+    def model_path(project: str, location: str, model: str,) -> str:
         """Return a fully-qualified model string."""
         return "projects/{project}/locations/{location}/models/{model}".format(
-            project=project,
-            location=location,
-            model=model,
+            project=project, location=location, model=model,
         )
 
     @staticmethod
@@ -276,16 +241,10 @@ def parse_model_path(path: str) -> Dict[str, str]:
         return m.groupdict() if m else {}
 
     @staticmethod
-    def version_path(
-        project: str,
-        model: str,
-        version: str,
-    ) -> str:
+    def version_path(project: str, model: str, version: str,) -> str:
         """Return a fully-qualified version string."""
         return "projects/{project}/models/{model}/versions/{version}".format(
-            project=project,
-            model=model,
-            version=version,
+            project=project, model=model, version=version,
         )
 
     @staticmethod
@@ -298,9 +257,7 @@ def parse_version_path(path: str) -> Dict[str, str]:
         return m.groupdict() if m else {}
 
     @staticmethod
-    def common_billing_account_path(
-        billing_account: str,
-    ) -> str:
+    def common_billing_account_path(billing_account: str,) -> str:
         """Return a fully-qualified billing_account string."""
         return "billingAccounts/{billing_account}".format(
             billing_account=billing_account,
@@ -313,13 +270,9 @@ def parse_common_billing_account_path(path: str) -> Dict[str, str]:
         return m.groupdict() if m else {}
 
     @staticmethod
-    def common_folder_path(
-        folder: str,
-    ) -> str:
+    def common_folder_path(folder: str,) -> str:
         """Return a fully-qualified folder string."""
-        return "folders/{folder}".format(
-            folder=folder,
-        )
+        return "folders/{folder}".format(folder=folder,)
 
     @staticmethod
     def parse_common_folder_path(path: str) -> Dict[str, str]:
@@ -328,13 +281,9 @@ def parse_common_folder_path(path: str) -> Dict[str, str]:
         return m.groupdict() if m else {}
 
     @staticmethod
-    def common_organization_path(
-        organization: str,
-    ) -> str:
+    def common_organization_path(organization: str,) -> str:
         """Return a fully-qualified organization string."""
-        return "organizations/{organization}".format(
-            organization=organization,
-        )
+        return "organizations/{organization}".format(organization=organization,)
 
     @staticmethod
     def parse_common_organization_path(path: str) -> Dict[str, str]:
@@ -343,13 +292,9 @@ def parse_common_organization_path(path: str) -> Dict[str, str]:
         return m.groupdict() if m else {}
 
     @staticmethod
-    def common_project_path(
-        project: str,
-    ) -> str:
+    def common_project_path(project: str,) -> str:
         """Return a fully-qualified project string."""
-        return "projects/{project}".format(
-            project=project,
-        )
+        return "projects/{project}".format(project=project,)
 
     @staticmethod
     def parse_common_project_path(path: str) -> Dict[str, str]:
@@ -358,14 +303,10 @@ def parse_common_project_path(path: str) -> Dict[str, str]:
         return m.groupdict() if m else {}
 
     @staticmethod
-    def common_location_path(
-        project: str,
-        location: str,
-    ) -> str:
+    def common_location_path(project: str, location: str,) -> str:
         """Return a fully-qualified location string."""
         return "projects/{project}/locations/{location}".format(
-            project=project,
-            location=location,
+            project=project, location=location,
         )
 
     @staticmethod
@@ -570,20 +511,12 @@ def search_migratable_resources(
         )
 
         # Send the request.
-        response = rpc(
-            request,
-            retry=retry,
-            timeout=timeout,
-            metadata=metadata,
-        )
+        response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,)
 
         # This method is paged; wrap the response in a pager, which provides
         # an `__iter__` convenience method.
         response = pagers.SearchMigratableResourcesPager(
-            method=rpc,
-            request=request,
-            response=response,
-            metadata=metadata,
+            method=rpc, request=request, response=response, metadata=metadata,
         )
 
         # Done; return the response.
@@ -679,12 +612,7 @@ def batch_migrate_resources(
         )
 
         # Send the request.
-        response = rpc(
-            request,
-            retry=retry,
-            timeout=timeout,
-            metadata=metadata,
-        )
+        response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,)
 
         # Wrap the response in an operation future.
         response = operation.from_gapic(
diff --git a/google/cloud/aiplatform_v1beta1/services/model_service/async_client.py b/google/cloud/aiplatform_v1beta1/services/model_service/async_client.py
index 631671e269..3b27b6e184 100644
--- a/google/cloud/aiplatform_v1beta1/services/model_service/async_client.py
+++ b/google/cloud/aiplatform_v1beta1/services/model_service/async_client.py
@@ -246,12 +246,7 @@ async def upload_model(
         )
 
         # Send the request.
-        response = await rpc(
-            request,
-            retry=retry,
-            timeout=timeout,
-            metadata=metadata,
-        )
+        response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,)
 
         # Wrap the response in an operation future.
         response = operation_async.from_gapic(
@@ -329,12 +324,7 @@ async def get_model(
         )
 
         # Send the request.
-        response = await rpc(
-            request,
-            retry=retry,
-            timeout=timeout,
-            metadata=metadata,
-        )
+        response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,)
 
         # Done; return the response.
         return response
 
@@ -410,20 +400,12 @@ async def list_models(
         )
 
         # Send the request.
-        response = await rpc(
-            request,
-            retry=retry,
-            timeout=timeout,
-            metadata=metadata,
-        )
+        response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,)
 
         # This method is paged; wrap the response in a pager, which provides
         # an `__aiter__` convenience method.
         response = pagers.ListModelsAsyncPager(
-            method=rpc,
-            request=request,
-            response=response,
-            metadata=metadata,
+            method=rpc, request=request, response=response, metadata=metadata,
         )
 
         # Done; return the response.
@@ -507,12 +489,7 @@ async def update_model(
         )
 
         # Send the request.
-        response = await rpc(
-            request,
-            retry=retry,
-            timeout=timeout,
-            metadata=metadata,
-        )
+        response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,)
 
         # Done; return the response.
         return response
 
@@ -602,12 +579,7 @@ async def delete_model(
         )
 
         # Send the request.
-        response = await rpc(
-            request,
-            retry=retry,
-            timeout=timeout,
-            metadata=metadata,
-        )
+        response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,)
 
         # Wrap the response in an operation future.
         response = operation_async.from_gapic(
@@ -705,12 +677,7 @@ async def export_model(
         )
 
         # Send the request.
-        response = await rpc(
-            request,
-            retry=retry,
-            timeout=timeout,
-            metadata=metadata,
-        )
+        response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,)
 
         # Wrap the response in an operation future.
         response = operation_async.from_gapic(
@@ -794,12 +761,7 @@ async def get_model_evaluation(
         )
 
         # Send the request.
-        response = await rpc(
-            request,
-            retry=retry,
-            timeout=timeout,
-            metadata=metadata,
-        )
+        response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,)
 
         # Done; return the response.
         return response
 
@@ -875,20 +837,12 @@ async def list_model_evaluations(
         )
 
         # Send the request.
-        response = await rpc(
-            request,
-            retry=retry,
-            timeout=timeout,
-            metadata=metadata,
-        )
+        response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,)
 
         # This method is paged; wrap the response in a pager, which provides
         # an `__aiter__` convenience method.
         response = pagers.ListModelEvaluationsAsyncPager(
-            method=rpc,
-            request=request,
-            response=response,
-            metadata=metadata,
+            method=rpc, request=request, response=response, metadata=metadata,
         )
 
         # Done; return the response.
@@ -965,12 +919,7 @@ async def get_model_evaluation_slice(
         )
 
         # Send the request.
-        response = await rpc(
-            request,
-            retry=retry,
-            timeout=timeout,
-            metadata=metadata,
-        )
+        response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,)
 
         # Done; return the response.
         return response
 
@@ -1047,20 +996,12 @@ async def list_model_evaluation_slices(
         )
 
         # Send the request.
-        response = await rpc(
-            request,
-            retry=retry,
-            timeout=timeout,
-            metadata=metadata,
-        )
+        response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,)
 
         # This method is paged; wrap the response in a pager, which provides
         # an `__aiter__` convenience method.
         response = pagers.ListModelEvaluationSlicesAsyncPager(
-            method=rpc,
-            request=request,
-            response=response,
-            metadata=metadata,
+            method=rpc, request=request, response=response, metadata=metadata,
         )
 
         # Done; return the response.
diff --git a/google/cloud/aiplatform_v1beta1/services/model_service/client.py b/google/cloud/aiplatform_v1beta1/services/model_service/client.py
index 423a86bb70..30c00c0c9d 100644
--- a/google/cloud/aiplatform_v1beta1/services/model_service/client.py
+++ b/google/cloud/aiplatform_v1beta1/services/model_service/client.py
@@ -65,10 +65,7 @@ class ModelServiceClientMeta(type):
     _transport_registry["grpc"] = ModelServiceGrpcTransport
     _transport_registry["grpc_asyncio"] = ModelServiceGrpcAsyncIOTransport
 
-    def get_transport_class(
-        cls,
-        label: str = None,
-    ) -> Type[ModelServiceTransport]:
+    def get_transport_class(cls, label: str = None,) -> Type[ModelServiceTransport]:
         """Return an appropriate transport class.
 
         Args:
@@ -154,16 +151,10 @@ def transport(self) -> ModelServiceTransport:
         return self._transport
 
     @staticmethod
-    def endpoint_path(
-        project: str,
-        location: str,
-        endpoint: str,
-    ) -> str:
+    def endpoint_path(project: str, location: str, endpoint: str,) -> str:
        """Return a fully-qualified endpoint string."""
         return "projects/{project}/locations/{location}/endpoints/{endpoint}".format(
-            project=project,
-            location=location,
-            endpoint=endpoint,
+            project=project, location=location, endpoint=endpoint,
         )
 
     @staticmethod
@@ -176,16 +167,10 @@ def parse_endpoint_path(path: str) -> Dict[str, str]:
         return m.groupdict() if m else {}
 
     @staticmethod
-    def model_path(
-        project: str,
-        location: str,
-        model: str,
-    ) -> str:
+    def model_path(project: str, location: str, model: str,) -> str:
         """Return a fully-qualified model string."""
         return "projects/{project}/locations/{location}/models/{model}".format(
-            project=project,
-            location=location,
-            model=model,
+            project=project, location=location, model=model,
         )
 
     @staticmethod
@@ -199,17 +184,11 @@ def parse_model_path(path: str) -> Dict[str, str]:
 
     @staticmethod
     def model_evaluation_path(
-        project: str,
-        location: str,
-        model: str,
-        evaluation: str,
+        project: str, location: str, model: str, evaluation: str,
     ) -> str:
         """Return a fully-qualified model_evaluation string."""
         return "projects/{project}/locations/{location}/models/{model}/evaluations/{evaluation}".format(
-            project=project,
-            location=location,
-            model=model,
-            evaluation=evaluation,
+            project=project, location=location, model=model, evaluation=evaluation,
         )
 
     @staticmethod
@@ -223,11 +202,7 @@ def parse_model_evaluation_path(path: str) -> Dict[str, str]:
 
     @staticmethod
     def model_evaluation_slice_path(
-        project: str,
-        location: str,
-        model: str,
-        evaluation: str,
-        slice: str,
+        project: str, location: str, model: str, evaluation: str, slice: str,
     ) -> str:
         """Return a fully-qualified model_evaluation_slice string."""
         return "projects/{project}/locations/{location}/models/{model}/evaluations/{evaluation}/slices/{slice}".format(
@@ -249,15 +224,11 @@ def parse_model_evaluation_slice_path(path: str) -> Dict[str, str]:
 
     @staticmethod
     def training_pipeline_path(
-        project: str,
-        location: str,
-        training_pipeline: str,
+        project: str, location: str, training_pipeline: str,
     ) -> str:
         """Return a fully-qualified training_pipeline string."""
         return "projects/{project}/locations/{location}/trainingPipelines/{training_pipeline}".format(
-            project=project,
-            location=location,
-            training_pipeline=training_pipeline,
+            project=project, location=location, training_pipeline=training_pipeline,
         )
 
     @staticmethod
@@ -270,9 +241,7 @@ def parse_training_pipeline_path(path: str) -> Dict[str, str]:
         return m.groupdict() if m else {}
 
     @staticmethod
-    def common_billing_account_path(
-        billing_account: str,
-    ) -> str:
+    def common_billing_account_path(billing_account: str,) -> str:
         """Return a fully-qualified billing_account string."""
         return "billingAccounts/{billing_account}".format(
             billing_account=billing_account,
@@ -285,13 +254,9 @@ def parse_common_billing_account_path(path: str) -> Dict[str, str]:
         return m.groupdict() if m else {}
 
     @staticmethod
-    def common_folder_path(
-        folder: str,
-    ) -> str:
+    def common_folder_path(folder: str,) -> str:
         """Return a fully-qualified folder string."""
-        return "folders/{folder}".format(
-            folder=folder,
-        )
+        return "folders/{folder}".format(folder=folder,)
 
     @staticmethod
     def parse_common_folder_path(path: str) -> Dict[str, str]:
@@ -300,13 +265,9 @@ def parse_common_folder_path(path: str) -> Dict[str, str]:
         return m.groupdict() if m else {}
 
     @staticmethod
-    def common_organization_path(
-        organization: str,
-    ) -> str:
+    def common_organization_path(organization: str,) -> str:
         """Return a fully-qualified organization string."""
-        return "organizations/{organization}".format(
-            organization=organization,
-        )
+        return "organizations/{organization}".format(organization=organization,)
 
     @staticmethod
     def parse_common_organization_path(path: str) -> Dict[str, str]:
@@ -315,13 +276,9 @@ def parse_common_organization_path(path: str) -> Dict[str, str]:
         return m.groupdict() if m else {}
 
     @staticmethod
-    def common_project_path(
-        project: str,
-    ) -> str:
+    def common_project_path(project: str,) -> str:
         """Return a fully-qualified project string."""
-        return "projects/{project}".format(
-            project=project,
-        )
+        return "projects/{project}".format(project=project,)
 
     @staticmethod
     def parse_common_project_path(path: str) -> Dict[str, str]:
@@ -330,14 +287,10 @@ def parse_common_project_path(path: str) -> Dict[str, str]:
         return m.groupdict() if m else {}
 
     @staticmethod
-    def common_location_path(
-        project: str,
-        location: str,
-    ) -> str:
+    def common_location_path(project: str, location: str,) -> str:
         """Return a fully-qualified location string."""
         return "projects/{project}/locations/{location}".format(
-            project=project,
-            location=location,
+            project=project, location=location,
         )
 
     @staticmethod
@@ -545,12 +498,7 @@ def upload_model(
         )
 
         # Send the request.
-        response = rpc(
-            request,
-            retry=retry,
-            timeout=timeout,
-            metadata=metadata,
-        )
+        response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,)
 
         # Wrap the response in an operation future.
         response = ga_operation.from_gapic(
@@ -629,12 +577,7 @@ def get_model(
         )
 
         # Send the request.
-        response = rpc(
-            request,
-            retry=retry,
-            timeout=timeout,
-            metadata=metadata,
-        )
+        response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,)
 
         # Done; return the response.
         return response
 
@@ -711,20 +654,12 @@ def list_models(
         )
 
         # Send the request.
-        response = rpc(
-            request,
-            retry=retry,
-            timeout=timeout,
-            metadata=metadata,
-        )
+        response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,)
 
         # This method is paged; wrap the response in a pager, which provides
         # an `__iter__` convenience method.
         response = pagers.ListModelsPager(
-            method=rpc,
-            request=request,
-            response=response,
-            metadata=metadata,
+            method=rpc, request=request, response=response, metadata=metadata,
         )
 
         # Done; return the response.
@@ -809,12 +744,7 @@ def update_model(
         )
 
         # Send the request.
-        response = rpc(
-            request,
-            retry=retry,
-            timeout=timeout,
-            metadata=metadata,
-        )
+        response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,)
 
         # Done; return the response.
         return response
 
@@ -905,12 +835,7 @@ def delete_model(
         )
 
         # Send the request.
-        response = rpc(
-            request,
-            retry=retry,
-            timeout=timeout,
-            metadata=metadata,
-        )
+        response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,)
 
         # Wrap the response in an operation future.
         response = ga_operation.from_gapic(
@@ -1009,12 +934,7 @@ def export_model(
         )
 
         # Send the request.
-        response = rpc(
-            request,
-            retry=retry,
-            timeout=timeout,
-            metadata=metadata,
-        )
+        response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,)
 
         # Wrap the response in an operation future.
         response = ga_operation.from_gapic(
@@ -1099,12 +1019,7 @@ def get_model_evaluation(
         )
 
         # Send the request.
-        response = rpc(
-            request,
-            retry=retry,
-            timeout=timeout,
-            metadata=metadata,
-        )
+        response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,)
 
         # Done; return the response.
         return response
 
@@ -1181,20 +1096,12 @@ def list_model_evaluations(
         )
 
         # Send the request.
-        response = rpc(
-            request,
-            retry=retry,
-            timeout=timeout,
-            metadata=metadata,
-        )
+        response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,)
 
         # This method is paged; wrap the response in a pager, which provides
         # an `__iter__` convenience method.
         response = pagers.ListModelEvaluationsPager(
-            method=rpc,
-            request=request,
-            response=response,
-            metadata=metadata,
+            method=rpc, request=request, response=response, metadata=metadata,
         )
 
         # Done; return the response.
@@ -1274,12 +1181,7 @@ def get_model_evaluation_slice(
         )
 
         # Send the request.
-        response = rpc(
-            request,
-            retry=retry,
-            timeout=timeout,
-            metadata=metadata,
-        )
+        response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,)
 
         # Done; return the response.
         return response
 
@@ -1359,20 +1261,12 @@ def list_model_evaluation_slices(
         )
 
         # Send the request.
-        response = rpc(
-            request,
-            retry=retry,
-            timeout=timeout,
-            metadata=metadata,
-        )
+        response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,)
 
         # This method is paged; wrap the response in a pager, which provides
         # an `__iter__` convenience method.
         response = pagers.ListModelEvaluationSlicesPager(
-            method=rpc,
-            request=request,
-            response=response,
-            metadata=metadata,
+            method=rpc, request=request, response=response, metadata=metadata,
         )
 
         # Done; return the response.
diff --git a/google/cloud/aiplatform_v1beta1/services/model_service/transports/base.py b/google/cloud/aiplatform_v1beta1/services/model_service/transports/base.py
index 17e7a98018..a0b896cdf4 100644
--- a/google/cloud/aiplatform_v1beta1/services/model_service/transports/base.py
+++ b/google/cloud/aiplatform_v1beta1/services/model_service/transports/base.py
@@ -113,39 +113,25 @@ def _prep_wrapped_messages(self, client_info):
         # Precompute the wrapped methods.
         self._wrapped_methods = {
             self.upload_model: gapic_v1.method.wrap_method(
-                self.upload_model,
-                default_timeout=5.0,
-                client_info=client_info,
+                self.upload_model, default_timeout=5.0, client_info=client_info,
             ),
             self.get_model: gapic_v1.method.wrap_method(
-                self.get_model,
-                default_timeout=5.0,
-                client_info=client_info,
+                self.get_model, default_timeout=5.0, client_info=client_info,
             ),
             self.list_models: gapic_v1.method.wrap_method(
-                self.list_models,
-                default_timeout=5.0,
-                client_info=client_info,
+                self.list_models, default_timeout=5.0, client_info=client_info,
             ),
             self.update_model: gapic_v1.method.wrap_method(
-                self.update_model,
-                default_timeout=5.0,
-                client_info=client_info,
+                self.update_model, default_timeout=5.0, client_info=client_info,
             ),
             self.delete_model: gapic_v1.method.wrap_method(
-                self.delete_model,
-                default_timeout=5.0,
-                client_info=client_info,
+                self.delete_model, default_timeout=5.0, client_info=client_info,
             ),
             self.export_model: gapic_v1.method.wrap_method(
-                self.export_model,
-                default_timeout=5.0,
-                client_info=client_info,
+                self.export_model, default_timeout=5.0, client_info=client_info,
             ),
             self.get_model_evaluation: gapic_v1.method.wrap_method(
-                self.get_model_evaluation,
-                default_timeout=5.0,
-                client_info=client_info,
+                self.get_model_evaluation, default_timeout=5.0, client_info=client_info,
             ),
             self.list_model_evaluations: gapic_v1.method.wrap_method(
                 self.list_model_evaluations,
diff --git a/google/cloud/aiplatform_v1beta1/services/pipeline_service/async_client.py b/google/cloud/aiplatform_v1beta1/services/pipeline_service/async_client.py
index 9b3f2f7fa7..ef420aae0b 100644
--- a/google/cloud/aiplatform_v1beta1/services/pipeline_service/async_client.py
+++ b/google/cloud/aiplatform_v1beta1/services/pipeline_service/async_client.py
@@ -241,12 +241,7 @@ async def create_training_pipeline(
         )
 
         # Send the request.
-        response = await rpc(
-            request,
-            retry=retry,
-            timeout=timeout,
-            metadata=metadata,
-        )
+        response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,)
 
         # Done; return the response.
         return response
 
@@ -324,12 +319,7 @@ async def get_training_pipeline(
         )
 
         # Send the request.
-        response = await rpc(
-            request,
-            retry=retry,
-            timeout=timeout,
-            metadata=metadata,
-        )
+        response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,)
 
         # Done; return the response.
         return response
 
@@ -405,20 +395,12 @@ async def list_training_pipelines(
         )
 
         # Send the request.
-        response = await rpc(
-            request,
-            retry=retry,
-            timeout=timeout,
-            metadata=metadata,
-        )
+        response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,)
 
         # This method is paged; wrap the response in a pager, which provides
         # an `__aiter__` convenience method.
         response = pagers.ListTrainingPipelinesAsyncPager(
-            method=rpc,
-            request=request,
-            response=response,
-            metadata=metadata,
+            method=rpc, request=request, response=response, metadata=metadata,
         )
 
         # Done; return the response.
@@ -508,12 +490,7 @@ async def delete_training_pipeline(
         )
 
         # Send the request.
-        response = await rpc(
-            request,
-            retry=retry,
-            timeout=timeout,
-            metadata=metadata,
-        )
+        response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,)
 
         # Wrap the response in an operation future.
         response = operation_async.from_gapic(
@@ -602,10 +579,7 @@ async def cancel_training_pipeline(
 
         # Send the request.
         await rpc(
-            request,
-            retry=retry,
-            timeout=timeout,
-            metadata=metadata,
+            request, retry=retry, timeout=timeout, metadata=metadata,
         )
diff --git a/google/cloud/aiplatform_v1beta1/services/pipeline_service/client.py b/google/cloud/aiplatform_v1beta1/services/pipeline_service/client.py
index 73c79cc90d..e3e7d6aeda 100644
--- a/google/cloud/aiplatform_v1beta1/services/pipeline_service/client.py
+++ b/google/cloud/aiplatform_v1beta1/services/pipeline_service/client.py
@@ -67,10 +67,7 @@ class PipelineServiceClientMeta(type):
     _transport_registry["grpc"] = PipelineServiceGrpcTransport
     _transport_registry["grpc_asyncio"] = PipelineServiceGrpcAsyncIOTransport
 
-    def get_transport_class(
-        cls,
-        label: str = None,
-    ) -> Type[PipelineServiceTransport]:
+    def get_transport_class(cls, label: str = None,) -> Type[PipelineServiceTransport]:
         """Return an appropriate transport class.
 
         Args:
@@ -156,16 +153,10 @@ def transport(self) -> PipelineServiceTransport:
         return self._transport
 
     @staticmethod
-    def endpoint_path(
-        project: str,
-        location: str,
-        endpoint: str,
-    ) -> str:
+    def endpoint_path(project: str, location: str, endpoint: str,) -> str:
         """Return a fully-qualified endpoint string."""
         return "projects/{project}/locations/{location}/endpoints/{endpoint}".format(
-            project=project,
-            location=location,
-            endpoint=endpoint,
+            project=project, location=location, endpoint=endpoint,
         )
 
     @staticmethod
@@ -178,16 +169,10 @@ def parse_endpoint_path(path: str) -> Dict[str, str]:
         return m.groupdict() if m else {}
 
     @staticmethod
-    def model_path(
-        project: str,
-        location: str,
-        model: str,
-    ) -> str:
+    def model_path(project: str, location: str, model: str,) -> str:
         """Return a fully-qualified model string."""
         return "projects/{project}/locations/{location}/models/{model}".format(
-            project=project,
-            location=location,
-            model=model,
+            project=project, location=location, model=model,
         )
 
     @staticmethod
@@ -201,15 +186,11 @@ def parse_model_path(path: str) -> Dict[str, str]:
 
     @staticmethod
     def training_pipeline_path(
-        project: str,
-        location: str,
-        training_pipeline: str,
+        project: str, location: str, training_pipeline: str,
     ) -> str:
         """Return a fully-qualified training_pipeline string."""
         return "projects/{project}/locations/{location}/trainingPipelines/{training_pipeline}".format(
-            project=project,
-            location=location,
-            training_pipeline=training_pipeline,
+            project=project, location=location, training_pipeline=training_pipeline,
         )
 
     @staticmethod
@@ -222,9 +203,7 @@ def parse_training_pipeline_path(path: str) -> Dict[str, str]:
         return m.groupdict() if m else {}
 
     @staticmethod
-    def common_billing_account_path(
-        billing_account: str,
-    ) -> str:
+    def common_billing_account_path(billing_account: str,) -> str:
         """Return a fully-qualified billing_account string."""
         return "billingAccounts/{billing_account}".format(
             billing_account=billing_account,
@@ -237,13 +216,9 @@ def parse_common_billing_account_path(path: str) -> Dict[str, str]:
         return m.groupdict() if m else {}
 
     @staticmethod
-    def common_folder_path(
-        folder: str,
-    ) -> str:
+    def common_folder_path(folder: str,) -> str:
         """Return a fully-qualified folder string."""
-        return "folders/{folder}".format(
-            folder=folder,
-        )
+        return "folders/{folder}".format(folder=folder,)
 
     @staticmethod
     def parse_common_folder_path(path: str) -> Dict[str, str]:
@@ -252,13 +227,9 @@ def parse_common_folder_path(path: str) -> Dict[str, str]:
         return m.groupdict() if m else {}
 
     @staticmethod
-    def common_organization_path(
-        organization: str,
-    ) -> str:
+    def common_organization_path(organization: str,) -> str:
         """Return a fully-qualified organization string."""
-        return "organizations/{organization}".format(
-            organization=organization,
-        )
+        return "organizations/{organization}".format(organization=organization,)
 
     @staticmethod
     def parse_common_organization_path(path: str) -> Dict[str, str]:
@@ -267,13 +238,9 @@ def parse_common_organization_path(path: str) -> Dict[str, str]:
         return m.groupdict() if m else {}
 
     @staticmethod
-    def common_project_path(
-        project: str,
-    ) -> str:
+    def common_project_path(project: str,) -> str:
         """Return a fully-qualified project string."""
-        return "projects/{project}".format(
-            project=project,
-        )
+        return "projects/{project}".format(project=project,)
 
     @staticmethod
     def parse_common_project_path(path: str) -> Dict[str, str]:
@@ -282,14 +249,10 @@ def parse_common_project_path(path: str) -> Dict[str, str]:
         return m.groupdict() if m else {}
 
     @staticmethod
-    def common_location_path(
-        project: str,
-        location: str,
-    ) -> str:
+    def common_location_path(project: str, location: str,) -> str:
         """Return a fully-qualified location string."""
         return "projects/{project}/locations/{location}".format(
-            project=project,
-            location=location,
+            project=project, location=location,
         )
 
     @staticmethod
@@ -498,12 +461,7 @@ def create_training_pipeline(
         )
 
         # Send the request.
-        response = rpc(
-            request,
-            retry=retry,
-            timeout=timeout,
-            metadata=metadata,
-        )
+        response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,)
 
         # Done; return the response.
         return response
 
@@ -582,12 +540,7 @@ def get_training_pipeline(
         )
 
         # Send the request.
-        response = rpc(
-            request,
-            retry=retry,
-            timeout=timeout,
-            metadata=metadata,
-        )
+        response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,)
 
         # Done; return the response.
         return response
 
@@ -664,20 +617,12 @@ def list_training_pipelines(
         )
 
         # Send the request.
-        response = rpc(
-            request,
-            retry=retry,
-            timeout=timeout,
-            metadata=metadata,
-        )
+        response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,)
 
         # This method is paged; wrap the response in a pager, which provides
         # an `__iter__` convenience method.
         response = pagers.ListTrainingPipelinesPager(
-            method=rpc,
-            request=request,
-            response=response,
-            metadata=metadata,
+            method=rpc, request=request, response=response, metadata=metadata,
         )
 
         # Done; return the response.
@@ -768,12 +713,7 @@ def delete_training_pipeline(
         )
 
         # Send the request.
-        response = rpc(
-            request,
-            retry=retry,
-            timeout=timeout,
-            metadata=metadata,
-        )
+        response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,)
 
         # Wrap the response in an operation future.
         response = ga_operation.from_gapic(
@@ -863,10 +803,7 @@ def cancel_training_pipeline(
 
         # Send the request.
         rpc(
-            request,
-            retry=retry,
-            timeout=timeout,
-            metadata=metadata,
+            request, retry=retry, timeout=timeout, metadata=metadata,
         )
diff --git a/google/cloud/aiplatform_v1beta1/services/prediction_service/async_client.py b/google/cloud/aiplatform_v1beta1/services/prediction_service/async_client.py
index 4545ad95e1..bb58b0bfac 100644
--- a/google/cloud/aiplatform_v1beta1/services/prediction_service/async_client.py
+++ b/google/cloud/aiplatform_v1beta1/services/prediction_service/async_client.py
@@ -241,12 +241,7 @@ async def predict(
         )
 
         # Send the request.
-        response = await rpc(
-            request,
-            retry=retry,
-            timeout=timeout,
-            metadata=metadata,
-        )
+        response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,)
 
         # Done; return the response.
         return response
 
@@ -372,12 +367,7 @@ async def explain(
         )
 
         # Send the request.
-        response = await rpc(
-            request,
-            retry=retry,
-            timeout=timeout,
-            metadata=metadata,
-        )
+        response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,)
 
         # Done; return the response.
         return response
diff --git a/google/cloud/aiplatform_v1beta1/services/prediction_service/client.py b/google/cloud/aiplatform_v1beta1/services/prediction_service/client.py
index 0a01fe3aae..9a5976d697 100644
--- a/google/cloud/aiplatform_v1beta1/services/prediction_service/client.py
+++ b/google/cloud/aiplatform_v1beta1/services/prediction_service/client.py
@@ -56,8 +56,7 @@ class PredictionServiceClientMeta(type):
     _transport_registry["grpc_asyncio"] = PredictionServiceGrpcAsyncIOTransport
 
     def get_transport_class(
-        cls,
-        label: str = None,
+        cls, label: str = None,
     ) -> Type[PredictionServiceTransport]:
         """Return an appropriate transport class.
 
@@ -144,16 +143,10 @@ def transport(self) -> PredictionServiceTransport:
         return self._transport
 
     @staticmethod
-    def endpoint_path(
-        project: str,
-        location: str,
-        endpoint: str,
-    ) -> str:
+    def endpoint_path(project: str, location: str, endpoint: str,) -> str:
         """Return a fully-qualified endpoint string."""
         return "projects/{project}/locations/{location}/endpoints/{endpoint}".format(
-            project=project,
-            location=location,
-            endpoint=endpoint,
+            project=project, location=location, endpoint=endpoint,
         )
 
     @staticmethod
@@ -166,9 +159,7 @@ def parse_endpoint_path(path: str) -> Dict[str, str]:
         return m.groupdict() if m else {}
 
     @staticmethod
-    def common_billing_account_path(
-        billing_account: str,
-    ) -> str:
+    def common_billing_account_path(billing_account: str,) -> str:
         """Return a fully-qualified billing_account string."""
         return "billingAccounts/{billing_account}".format(
             billing_account=billing_account,
@@ -181,13 +172,9 @@ def parse_common_billing_account_path(path: str) -> Dict[str, str]:
         return m.groupdict() if m else {}
 
     @staticmethod
-    def common_folder_path(
-        folder: str,
-    ) -> str:
+    def common_folder_path(folder: str,) -> str:
         """Return a fully-qualified folder string."""
-        return "folders/{folder}".format(
-            folder=folder,
-        )
+        return "folders/{folder}".format(folder=folder,)
 
     @staticmethod
     def parse_common_folder_path(path: str) -> Dict[str, str]:
@@ -196,13 +183,9 @@ def parse_common_folder_path(path: str) -> Dict[str, str]:
         return m.groupdict() if m else {}
 
     @staticmethod
-    def common_organization_path(
-        organization: str,
-    ) -> str:
+    def common_organization_path(organization: str,) -> str:
         """Return a fully-qualified organization string."""
-        return "organizations/{organization}".format(
-            organization=organization,
-        )
+        return "organizations/{organization}".format(organization=organization,)
 
     @staticmethod
     def parse_common_organization_path(path: str) -> Dict[str, str]:
@@ -211,13 +194,9 @@ def parse_common_organization_path(path: str) -> Dict[str, str]:
         return m.groupdict() if m else {}
 
     @staticmethod
-    def common_project_path(
-        project: str,
-    ) -> str:
+    def common_project_path(project: str,) -> str:
         """Return a fully-qualified project string."""
-        return "projects/{project}".format(
-            project=project,
-        )
+        return "projects/{project}".format(project=project,)
 
     @staticmethod
     def parse_common_project_path(path: str) -> Dict[str, str]:
@@ -226,14 +205,10 @@ def parse_common_project_path(path: str) -> Dict[str, str]:
         return m.groupdict() if m else {}
 
     @staticmethod
-    def common_location_path(
-        project: str,
-        location: str,
-    ) -> str:
+    def common_location_path(project: str, location: str,) -> str:
         """Return a fully-qualified location string."""
         return "projects/{project}/locations/{location}".format(
-            project=project,
-            location=location,
+            project=project, location=location,
         )
 
     @staticmethod
@@ -460,12 +435,7 @@ def predict(
         )
 
         # Send the request.
-        response = rpc(
-            request,
-            retry=retry,
-            timeout=timeout,
-            metadata=metadata,
-        )
+        response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,)
 
         # Done; return the response.
         return response
 
@@ -592,12 +562,7 @@ def explain(
         )
 
         # Send the request.
-        response = rpc(
-            request,
-            retry=retry,
-            timeout=timeout,
-            metadata=metadata,
-        )
+        response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,)
 
         # Done; return the response.
         return response
diff --git a/google/cloud/aiplatform_v1beta1/services/prediction_service/transports/base.py b/google/cloud/aiplatform_v1beta1/services/prediction_service/transports/base.py
index 739153f493..f2f7a028cc 100644
--- a/google/cloud/aiplatform_v1beta1/services/prediction_service/transports/base.py
+++ b/google/cloud/aiplatform_v1beta1/services/prediction_service/transports/base.py
@@ -107,14 +107,10 @@ def _prep_wrapped_messages(self, client_info):
         # Precompute the wrapped methods.
         self._wrapped_methods = {
             self.predict: gapic_v1.method.wrap_method(
-                self.predict,
-                default_timeout=5.0,
-                client_info=client_info,
+                self.predict, default_timeout=5.0, client_info=client_info,
             ),
             self.explain: gapic_v1.method.wrap_method(
-                self.explain,
-                default_timeout=5.0,
-                client_info=client_info,
+                self.explain, default_timeout=5.0, client_info=client_info,
             ),
         }
diff --git a/google/cloud/aiplatform_v1beta1/services/specialist_pool_service/async_client.py b/google/cloud/aiplatform_v1beta1/services/specialist_pool_service/async_client.py
index d27ca66fe1..c693126d4c 100644
--- a/google/cloud/aiplatform_v1beta1/services/specialist_pool_service/async_client.py
+++ b/google/cloud/aiplatform_v1beta1/services/specialist_pool_service/async_client.py
@@ -247,12 +247,7 @@ async def create_specialist_pool(
         )
 
         # Send the request.
-        response = await rpc(
-            request,
-            retry=retry,
-            timeout=timeout,
-            metadata=metadata,
-        )
+        response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,)
 
         # Wrap the response in an operation future.
         response = operation_async.from_gapic(
@@ -343,12 +338,7 @@ async def get_specialist_pool(
         )
 
         # Send the request.
-        response = await rpc(
-            request,
-            retry=retry,
-            timeout=timeout,
-            metadata=metadata,
-        )
+        response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,)
 
         # Done; return the response.
         return response
 
@@ -424,20 +414,12 @@ async def list_specialist_pools(
         )
 
         # Send the request.
-        response = await rpc(
-            request,
-            retry=retry,
-            timeout=timeout,
-            metadata=metadata,
-        )
+        response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,)
 
         # This method is paged; wrap the response in a pager, which provides
         # an `__aiter__` convenience method.
         response = pagers.ListSpecialistPoolsAsyncPager(
-            method=rpc,
-            request=request,
-            response=response,
-            metadata=metadata,
+            method=rpc, request=request, response=response, metadata=metadata,
         )
 
         # Done; return the response.
@@ -527,12 +509,7 @@ async def delete_specialist_pool(
         )
 
         # Send the request.
-        response = await rpc(
-            request,
-            retry=retry,
-            timeout=timeout,
-            metadata=metadata,
-        )
+        response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,)
 
         # Wrap the response in an operation future.
         response = operation_async.from_gapic(
@@ -633,12 +610,7 @@ async def update_specialist_pool(
         )
 
         # Send the request.
-        response = await rpc(
-            request,
-            retry=retry,
-            timeout=timeout,
-            metadata=metadata,
-        )
+        response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,)
 
         # Wrap the response in an operation future.
         response = operation_async.from_gapic(
diff --git a/google/cloud/aiplatform_v1beta1/services/specialist_pool_service/client.py b/google/cloud/aiplatform_v1beta1/services/specialist_pool_service/client.py
index 58a55cd7f7..efc19eca12 100644
--- a/google/cloud/aiplatform_v1beta1/services/specialist_pool_service/client.py
+++ b/google/cloud/aiplatform_v1beta1/services/specialist_pool_service/client.py
@@ -62,8 +62,7 @@ class SpecialistPoolServiceClientMeta(type):
     _transport_registry["grpc_asyncio"] = SpecialistPoolServiceGrpcAsyncIOTransport
 
     def get_transport_class(
-        cls,
-        label: str = None,
+        cls, label: str = None,
     ) -> Type[SpecialistPoolServiceTransport]:
         """Return an appropriate transport class.
 
@@ -156,16 +155,10 @@ def transport(self) -> SpecialistPoolServiceTransport:
         return self._transport
 
     @staticmethod
-    def specialist_pool_path(
-        project: str,
-        location: str,
-        specialist_pool: str,
-    ) -> str:
+    def specialist_pool_path(project: str, location: str, specialist_pool: str,) -> str:
         """Return a fully-qualified specialist_pool string."""
         return "projects/{project}/locations/{location}/specialistPools/{specialist_pool}".format(
-            project=project,
-            location=location,
-            specialist_pool=specialist_pool,
+            project=project, location=location, specialist_pool=specialist_pool,
         )
 
     @staticmethod
@@ -178,9 +171,7 @@ def parse_specialist_pool_path(path: str) -> Dict[str, str]:
         return m.groupdict() if m else {}
 
     @staticmethod
-    def common_billing_account_path(
-        billing_account: str,
-    ) -> str:
+    def common_billing_account_path(billing_account: str,) -> str:
         """Return a fully-qualified billing_account string."""
         return "billingAccounts/{billing_account}".format(
             billing_account=billing_account,
@@ -193,13 +184,9 @@ def parse_common_billing_account_path(path: str) -> Dict[str, str]:
         return m.groupdict() if m else {}
 
     @staticmethod
-    def common_folder_path(
-        folder: str,
-    ) -> str:
+    def common_folder_path(folder: str,) -> str:
         """Return a fully-qualified folder string."""
-        return "folders/{folder}".format(
-            folder=folder,
-        )
+        return "folders/{folder}".format(folder=folder,)
 
     @staticmethod
     def parse_common_folder_path(path: str) -> Dict[str, str]:
@@ -208,13 +195,9 @@ def parse_common_folder_path(path: str) -> Dict[str, str]:
         return m.groupdict() if m else {}
 
     @staticmethod
-    def common_organization_path(
-        organization: str,
-    ) -> str:
+    def common_organization_path(organization: str,) -> str:
         """Return a fully-qualified organization string."""
-        return "organizations/{organization}".format(
-            organization=organization,
-        )
+        return "organizations/{organization}".format(organization=organization,)
 
     @staticmethod
     def parse_common_organization_path(path: str) -> Dict[str, str]:
@@ -223,13 +206,9 @@ def parse_common_organization_path(path: str) -> Dict[str, str]:
         return m.groupdict() if m else {}
 
     @staticmethod
-    def common_project_path(
-        project: str,
-    ) -> str:
+    def common_project_path(project: str,) -> str:
         """Return a fully-qualified project string."""
-        return "projects/{project}".format(
-            project=project,
-        )
+        return "projects/{project}".format(project=project,)
 
     @staticmethod
     def parse_common_project_path(path: str) -> Dict[str, str]:
@@ -238,14 +217,10 @@ def parse_common_project_path(path: str) -> Dict[str, str]:
         return m.groupdict() if m else {}
 
     @staticmethod
-    def common_location_path(
-        project: str,
-        location: str,
-    ) -> str:
+    def common_location_path(project: str, location: str,) -> str:
         """Return a fully-qualified location string."""
         return "projects/{project}/locations/{location}".format(
-            project=project,
-            location=location,
+            project=project, location=location,
         )
 
     @staticmethod
@@ -459,12 +434,7 @@ def create_specialist_pool(
         )
 
         # Send the request.
-        response = rpc(
-            request,
-            retry=retry,
-            timeout=timeout,
-            metadata=metadata,
-        )
+        response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,)
 
         # Wrap the response in an operation future.
         response = ga_operation.from_gapic(
@@ -556,12 +526,7 @@ def get_specialist_pool(
         )
 
         # Send the request.
-        response = rpc(
-            request,
-            retry=retry,
-            timeout=timeout,
-            metadata=metadata,
-        )
+        response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,)
 
         # Done; return the response.
         return response
 
@@ -638,20 +603,12 @@ def list_specialist_pools(
         )
 
         # Send the request.
-        response = rpc(
-            request,
-            retry=retry,
-            timeout=timeout,
-            metadata=metadata,
-        )
+        response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,)
 
         # This method is paged; wrap the response in a pager, which provides
         # an `__iter__` convenience method.
         response = pagers.ListSpecialistPoolsPager(
-            method=rpc,
-            request=request,
-            response=response,
-            metadata=metadata,
+            method=rpc, request=request, response=response, metadata=metadata,
         )
 
         # Done; return the response.
@@ -742,12 +699,7 @@ def delete_specialist_pool(
         )
 
         # Send the request.
-        response = rpc(
-            request,
-            retry=retry,
-            timeout=timeout,
-            metadata=metadata,
-        )
+        response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,)
 
         # Wrap the response in an operation future.
         response = ga_operation.from_gapic(
@@ -849,12 +801,7 @@ def update_specialist_pool(
         )
 
         # Send the request.
-        response = rpc(
-            request,
-            retry=retry,
-            timeout=timeout,
-            metadata=metadata,
-        )
+        response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,)
 
         # Wrap the response in an operation future.
         response = ga_operation.from_gapic(
diff --git a/google/cloud/aiplatform_v1beta1/services/specialist_pool_service/transports/base.py b/google/cloud/aiplatform_v1beta1/services/specialist_pool_service/transports/base.py
index e4de291be3..a39c2f1f71 100644
--- a/google/cloud/aiplatform_v1beta1/services/specialist_pool_service/transports/base.py
+++ b/google/cloud/aiplatform_v1beta1/services/specialist_pool_service/transports/base.py
@@ -115,9 +115,7 @@ def _prep_wrapped_messages(self, client_info):
                 client_info=client_info,
             ),
             self.get_specialist_pool: gapic_v1.method.wrap_method(
-                self.get_specialist_pool,
-                default_timeout=5.0,
-                client_info=client_info,
+                self.get_specialist_pool, default_timeout=5.0, client_info=client_info,
             ),
             self.list_specialist_pools: gapic_v1.method.wrap_method(
                 self.list_specialist_pools,
diff --git a/google/cloud/aiplatform_v1beta1/types/accelerator_type.py b/google/cloud/aiplatform_v1beta1/types/accelerator_type.py
index 23be882aaa..337b0eeaf5 100644
--- a/google/cloud/aiplatform_v1beta1/types/accelerator_type.py
+++ b/google/cloud/aiplatform_v1beta1/types/accelerator_type.py
@@ -19,10 +19,7 @@
 
 
 __protobuf__ = proto.module(
-    package="google.cloud.aiplatform.v1beta1",
-    manifest={
-        "AcceleratorType",
-    },
+    package="google.cloud.aiplatform.v1beta1", manifest={"AcceleratorType",},
 )
 
 
diff --git a/google/cloud/aiplatform_v1beta1/types/annotation.py b/google/cloud/aiplatform_v1beta1/types/annotation.py
index 93bd0481b1..7734fcc512 100644
--- a/google/cloud/aiplatform_v1beta1/types/annotation.py
+++ b/google/cloud/aiplatform_v1beta1/types/annotation.py
@@ -24,10 +24,7 @@
 
 
 __protobuf__ = proto.module(
-    package="google.cloud.aiplatform.v1beta1",
-    manifest={
-        "Annotation",
-    },
+    package="google.cloud.aiplatform.v1beta1", manifest={"Annotation",},
 )
 
 
@@ -94,30 +91,16 @@ class Annotation(proto.Message):
 
     payload_schema_uri = proto.Field(proto.STRING, number=2)
 
-    payload = proto.Field(
-        proto.MESSAGE,
-        number=3,
-        message=struct.Value,
-    )
+    payload = proto.Field(proto.MESSAGE, number=3, message=struct.Value,)
 
-    create_time = proto.Field(
-        proto.MESSAGE,
-        number=4,
-        message=timestamp.Timestamp,
-    )
+    create_time = proto.Field(proto.MESSAGE, number=4, message=timestamp.Timestamp,)
 
-    update_time = proto.Field(
-        proto.MESSAGE,
-        number=7,
-        message=timestamp.Timestamp,
-    )
+    update_time = proto.Field(proto.MESSAGE, number=7, message=timestamp.Timestamp,)
 
     etag = proto.Field(proto.STRING, number=8)
 
     annotation_source = proto.Field(
-        proto.MESSAGE,
-        number=5,
-        message=user_action_reference.UserActionReference,
+        proto.MESSAGE, number=5, message=user_action_reference.UserActionReference,
     )
 
     labels = proto.MapField(proto.STRING, proto.STRING, number=6)
diff --git a/google/cloud/aiplatform_v1beta1/types/annotation_spec.py b/google/cloud/aiplatform_v1beta1/types/annotation_spec.py
index 2d6e16e44f..a5a4b3d489 100644
--- a/google/cloud/aiplatform_v1beta1/types/annotation_spec.py
+++ b/google/cloud/aiplatform_v1beta1/types/annotation_spec.py
@@ -22,10 +22,7 @@
 
 
 __protobuf__ = proto.module(
-    package="google.cloud.aiplatform.v1beta1",
-    manifest={
-        "AnnotationSpec",
-    },
+    package="google.cloud.aiplatform.v1beta1", manifest={"AnnotationSpec",},
 )
 
 
@@ -58,17 +55,9 @@ class AnnotationSpec(proto.Message):
 
     display_name = proto.Field(proto.STRING, number=2)
 
-    create_time = proto.Field(
-        proto.MESSAGE,
-        number=3,
-        message=timestamp.Timestamp,
-    )
-
-    update_time = proto.Field(
-        proto.MESSAGE,
-        number=4,
-        message=timestamp.Timestamp,
-    )
+    create_time = proto.Field(proto.MESSAGE, number=3, message=timestamp.Timestamp,)
+
+    update_time = proto.Field(proto.MESSAGE, number=4, message=timestamp.Timestamp,)
 
     etag = proto.Field(proto.STRING, number=5)
diff --git a/google/cloud/aiplatform_v1beta1/types/batch_prediction_job.py b/google/cloud/aiplatform_v1beta1/types/batch_prediction_job.py
index 3d7501f3ce..625bf83155 100644
--- a/google/cloud/aiplatform_v1beta1/types/batch_prediction_job.py
+++ b/google/cloud/aiplatform_v1beta1/types/batch_prediction_job.py
@@ -34,10 +34,7 @@
 
 
 __protobuf__ = proto.module(
-    package="google.cloud.aiplatform.v1beta1",
-    manifest={
-        "BatchPredictionJob",
-    },
+    package="google.cloud.aiplatform.v1beta1", manifest={"BatchPredictionJob",},
 )
 
 
@@ -214,17 +211,11 @@ class InputConfig(proto.Message):
         """
 
         gcs_source = proto.Field(
-            proto.MESSAGE,
-            number=2,
-            oneof="source",
-            message=io.GcsSource,
+            proto.MESSAGE, number=2, oneof="source", message=io.GcsSource,
         )
 
         bigquery_source = proto.Field(
-            proto.MESSAGE,
-            number=3,
-            oneof="source",
-            message=io.BigQuerySource,
+            proto.MESSAGE, number=3, oneof="source", message=io.BigQuerySource,
         )
 
         instances_format = proto.Field(proto.STRING, number=1)
@@ -296,10 +287,7 @@ class OutputConfig(proto.Message):
         """
 
         gcs_destination = proto.Field(
-            proto.MESSAGE,
-            number=2,
-            oneof="destination",
-            message=io.GcsDestination,
+            proto.MESSAGE, number=2, oneof="destination", message=io.GcsDestination,
         )
 
         bigquery_destination = proto.Field(
@@ -340,28 +328,14 @@ class OutputInfo(proto.Message):
 
     model = proto.Field(proto.STRING, number=3)
 
-    input_config = proto.Field(
-        proto.MESSAGE,
-        number=4,
-        message=InputConfig,
-    )
+    input_config = proto.Field(proto.MESSAGE, number=4, message=InputConfig,)
 
-    model_parameters = proto.Field(
-        proto.MESSAGE,
-        number=5,
-        message=struct.Value,
-    )
+    model_parameters = proto.Field(proto.MESSAGE, number=5, message=struct.Value,)
 
-    output_config = proto.Field(
-        proto.MESSAGE,
-        number=6,
-        message=OutputConfig,
-    )
+    output_config = proto.Field(proto.MESSAGE, number=6, message=OutputConfig,)
 
     dedicated_resources = proto.Field(
-        proto.MESSAGE,
-        number=7,
-        message=machine_resources.BatchDedicatedResources,
+        proto.MESSAGE, number=7, message=machine_resources.BatchDedicatedResources,
     )
 
     manual_batch_tuning_parameters = proto.Field(
@@ -373,70 +347,34 @@ class OutputInfo(proto.Message):
 
     generate_explanation = proto.Field(proto.BOOL, number=23)
 
     explanation_spec = proto.Field(
-        proto.MESSAGE,
-        number=25,
-        message=explanation.ExplanationSpec,
+        proto.MESSAGE, number=25, message=explanation.ExplanationSpec,
     )
 
-    output_info = proto.Field(
-        proto.MESSAGE,
-        number=9,
-        message=OutputInfo,
-    )
+    output_info = proto.Field(proto.MESSAGE, number=9, message=OutputInfo,)
 
-    state = proto.Field(
-        proto.ENUM,
-        number=10,
-        enum=job_state.JobState,
-    )
+    state = proto.Field(proto.ENUM, number=10, enum=job_state.JobState,)
 
-    error = proto.Field(
-        proto.MESSAGE,
-        number=11,
-        message=status.Status,
-    )
+    error = proto.Field(proto.MESSAGE, number=11, message=status.Status,)
 
     partial_failures = proto.RepeatedField(
-        proto.MESSAGE,
-        number=12,
-        message=status.Status,
+        proto.MESSAGE, number=12, message=status.Status,
     )
 
     resources_consumed = proto.Field(
-        proto.MESSAGE,
-        number=13,
-        message=machine_resources.ResourcesConsumed,
+        proto.MESSAGE, number=13, message=machine_resources.ResourcesConsumed,
     )
 
     completion_stats = proto.Field(
-        proto.MESSAGE,
-        number=14,
-        message=gca_completion_stats.CompletionStats,
+        proto.MESSAGE, number=14, message=gca_completion_stats.CompletionStats,
     )
 
-    create_time = proto.Field(
-        proto.MESSAGE,
-        number=15,
-        message=timestamp.Timestamp,
-    )
+    create_time = proto.Field(proto.MESSAGE, number=15, message=timestamp.Timestamp,)
 
-    start_time = proto.Field(
-        proto.MESSAGE,
-        number=16,
-        message=timestamp.Timestamp,
-    )
+    start_time = proto.Field(proto.MESSAGE, number=16, message=timestamp.Timestamp,)
 
-    end_time = proto.Field(
-        proto.MESSAGE,
-        number=17,
-        message=timestamp.Timestamp,
-    )
+    end_time = proto.Field(proto.MESSAGE, number=17, message=timestamp.Timestamp,)
 
-    update_time = proto.Field(
-        proto.MESSAGE,
-        number=18,
-        message=timestamp.Timestamp,
-    )
+    update_time = proto.Field(proto.MESSAGE, number=18, message=timestamp.Timestamp,)
 
     labels = proto.MapField(proto.STRING, proto.STRING, number=19)
diff --git a/google/cloud/aiplatform_v1beta1/types/completion_stats.py b/google/cloud/aiplatform_v1beta1/types/completion_stats.py
index f2626b9c9b..165be59634 100644
--- a/google/cloud/aiplatform_v1beta1/types/completion_stats.py
+++ b/google/cloud/aiplatform_v1beta1/types/completion_stats.py
@@ -19,10 +19,7 @@
 
 
 __protobuf__ = proto.module(
-    package="google.cloud.aiplatform.v1beta1",
-    manifest={
-        "CompletionStats",
-    },
+    package="google.cloud.aiplatform.v1beta1", manifest={"CompletionStats",},
 )
 
 
diff --git a/google/cloud/aiplatform_v1beta1/types/custom_job.py b/google/cloud/aiplatform_v1beta1/types/custom_job.py
index d4e2a086bd..2d8745538c 100644
--- a/google/cloud/aiplatform_v1beta1/types/custom_job.py
+++ b/google/cloud/aiplatform_v1beta1/types/custom_job.py
@@ -89,47 +89,19 @@ class CustomJob(proto.Message):
 
     display_name = proto.Field(proto.STRING, number=2)
 
-    job_spec = proto.Field(
-        proto.MESSAGE,
-        number=4,
-        message="CustomJobSpec",
-    )
+    job_spec = proto.Field(proto.MESSAGE, number=4, message="CustomJobSpec",)
 
-    state = proto.Field(
-        proto.ENUM,
-        number=5,
-        enum=job_state.JobState,
-    )
+    state = proto.Field(proto.ENUM, number=5, enum=job_state.JobState,)
 
-    create_time = proto.Field(
-        proto.MESSAGE,
-        number=6,
-        message=timestamp.Timestamp,
-    )
+    create_time = proto.Field(proto.MESSAGE, number=6, message=timestamp.Timestamp,)
 
-    start_time = proto.Field(
-        proto.MESSAGE,
-        number=7,
-        message=timestamp.Timestamp,
-    )
+    start_time = proto.Field(proto.MESSAGE, number=7, message=timestamp.Timestamp,)
 
-    end_time = proto.Field(
-        proto.MESSAGE,
-        number=8,
-        message=timestamp.Timestamp,
-    )
+    end_time = proto.Field(proto.MESSAGE, number=8, message=timestamp.Timestamp,)
 
-    update_time = proto.Field(
-        proto.MESSAGE,
-        number=9,
-        message=timestamp.Timestamp,
-    )
+    update_time = proto.Field(proto.MESSAGE, number=9, message=timestamp.Timestamp,)
 
-    error = proto.Field(
-        proto.MESSAGE,
-        number=10,
-        message=status.Status,
-    )
+    error = proto.Field(proto.MESSAGE, number=10, message=status.Status,)
 
     labels = proto.MapField(proto.STRING, proto.STRING, number=11)
 
@@ -195,25 +167,17 @@ class CustomJobSpec(proto.Message):
     """
 
     worker_pool_specs = proto.RepeatedField(
-        proto.MESSAGE,
-        number=1,
-        message="WorkerPoolSpec",
+        proto.MESSAGE, number=1, message="WorkerPoolSpec",
     )
 
-    scheduling = proto.Field(
-        proto.MESSAGE,
-        number=3,
-        message="Scheduling",
-    )
+    scheduling = proto.Field(proto.MESSAGE, number=3, message="Scheduling",)
 
     service_account = proto.Field(proto.STRING, number=4)
 
     network = proto.Field(proto.STRING, number=5)
 
     base_output_directory = proto.Field(
-        proto.MESSAGE,
-        number=6,
-        message=io.GcsDestination,
+        proto.MESSAGE, number=6, message=io.GcsDestination,
     )
 
 
@@ -236,31 +200,21 @@ class WorkerPoolSpec(proto.Message):
     """
 
     container_spec = proto.Field(
-        proto.MESSAGE,
-        number=6,
-        oneof="task",
-        message="ContainerSpec",
+        proto.MESSAGE, number=6, oneof="task", message="ContainerSpec",
    )
 
     python_package_spec = proto.Field(
-        proto.MESSAGE,
-        number=7,
-        oneof="task",
-        message="PythonPackageSpec",
+        proto.MESSAGE, number=7, oneof="task", message="PythonPackageSpec",
     )
 
     machine_spec = proto.Field(
-        proto.MESSAGE,
-        number=1,
-        message=machine_resources.MachineSpec,
+        proto.MESSAGE, number=1, message=machine_resources.MachineSpec,
     )
 
     replica_count = proto.Field(proto.INT64, number=2)
 
     disk_spec = proto.Field(
-        proto.MESSAGE,
-        number=5,
-        message=machine_resources.DiskSpec,
+        proto.MESSAGE, number=5, message=machine_resources.DiskSpec,
     )
 
 
@@ -336,11 +290,7 @@ class Scheduling(proto.Message):
             to workers leaving and joining a job.
     """
 
-    timeout = proto.Field(
-        proto.MESSAGE,
-        number=1,
-        message=duration.Duration,
-    )
+    timeout = proto.Field(proto.MESSAGE, number=1, message=duration.Duration,)
 
     restart_job_on_worker_restart = proto.Field(proto.BOOL, number=3)
diff --git a/google/cloud/aiplatform_v1beta1/types/data_item.py b/google/cloud/aiplatform_v1beta1/types/data_item.py
index 8ef4b9c8c6..e43a944d94 100644
--- a/google/cloud/aiplatform_v1beta1/types/data_item.py
+++ b/google/cloud/aiplatform_v1beta1/types/data_item.py
@@ -23,10 +23,7 @@
 
 
 __protobuf__ = proto.module(
-    package="google.cloud.aiplatform.v1beta1",
-    manifest={
-        "DataItem",
-    },
+    package="google.cloud.aiplatform.v1beta1", manifest={"DataItem",},
 )
 
 
@@ -73,25 +70,13 @@ class DataItem(proto.Message):
 
     name = proto.Field(proto.STRING, number=1)
 
-    create_time = proto.Field(
-        proto.MESSAGE,
-        number=2,
-        message=timestamp.Timestamp,
-    )
+    create_time = proto.Field(proto.MESSAGE, number=2, message=timestamp.Timestamp,)
 
-    update_time = proto.Field(
-        proto.MESSAGE,
-        number=6,
-        message=timestamp.Timestamp,
-    )
+    update_time = proto.Field(proto.MESSAGE, number=6, message=timestamp.Timestamp,)
 
     labels = proto.MapField(proto.STRING, proto.STRING, number=3)
 
-    payload = proto.Field(
-        proto.MESSAGE,
-        number=4,
-        message=struct.Value,
-    )
+    payload = proto.Field(proto.MESSAGE, number=4, message=struct.Value,)
 
     etag = proto.Field(proto.STRING, number=7)
diff --git a/google/cloud/aiplatform_v1beta1/types/data_labeling_job.py b/google/cloud/aiplatform_v1beta1/types/data_labeling_job.py
index c1542d0661..af1bcdd871 100644
--- a/google/cloud/aiplatform_v1beta1/types/data_labeling_job.py
+++ b/google/cloud/aiplatform_v1beta1/types/data_labeling_job.py
@@ -146,52 +146,26 @@ class DataLabelingJob(proto.Message):
 
     inputs_schema_uri = proto.Field(proto.STRING, number=6)
 
-    inputs = proto.Field(
-        proto.MESSAGE,
-        number=7,
-        message=struct.Value,
-    )
+    inputs = proto.Field(proto.MESSAGE, number=7, message=struct.Value,)
 
-    state = proto.Field(
-        proto.ENUM,
-        number=8,
-        enum=job_state.JobState,
-    )
+    state = proto.Field(proto.ENUM, number=8, enum=job_state.JobState,)
 
     labeling_progress = proto.Field(proto.INT32, number=13)
 
-    current_spend = proto.Field(
-        proto.MESSAGE,
-        number=14,
-        message=money.Money,
-    )
+    current_spend = proto.Field(proto.MESSAGE, number=14, message=money.Money,)
 
-    create_time = proto.Field(
-        proto.MESSAGE,
-        number=9,
-        message=timestamp.Timestamp,
-    )
+    create_time = proto.Field(proto.MESSAGE, number=9, message=timestamp.Timestamp,)
 
-    update_time = proto.Field(
-        proto.MESSAGE,
-        number=10,
-        message=timestamp.Timestamp,
-    )
+    update_time = proto.Field(proto.MESSAGE, number=10, message=timestamp.Timestamp,)
 
-    error = proto.Field(
-        proto.MESSAGE,
-        number=22,
-        message=status.Status,
-    )
+    error = proto.Field(proto.MESSAGE, number=22, message=status.Status,)
 
     labels = proto.MapField(proto.STRING, proto.STRING, number=11)
 
     specialist_pools = proto.RepeatedField(proto.STRING, number=16)
 
     active_learning_config = proto.Field(
-        proto.MESSAGE,
-        number=21,
-        message="ActiveLearningConfig",
+        proto.MESSAGE, number=21, message="ActiveLearningConfig",
     )
 
 
@@ -228,17 +202,9 @@ class ActiveLearningConfig(proto.Message):
         proto.INT32, number=2, oneof="human_labeling_budget"
     )
 
-    sample_config = proto.Field(
-        proto.MESSAGE,
-        number=3,
-        message="SampleConfig",
-    )
+    sample_config = proto.Field(proto.MESSAGE, number=3, message="SampleConfig",)
 
-    training_config = proto.Field(
-        proto.MESSAGE,
-        number=4,
-        message="TrainingConfig",
-    )
+    training_config = proto.Field(proto.MESSAGE, number=4, message="TrainingConfig",)
 
 
 class SampleConfig(proto.Message):
@@ -275,11 +241,7 @@ class SampleStrategy(proto.Enum):
         proto.INT32, number=3, oneof="following_batch_sample_size"
     )
 
-    sample_strategy = proto.Field(
-        proto.ENUM,
-        number=5,
-        enum=SampleStrategy,
-    )
+    sample_strategy = proto.Field(proto.ENUM, number=5, enum=SampleStrategy,)
 
 
 class TrainingConfig(proto.Message):
diff --git a/google/cloud/aiplatform_v1beta1/types/dataset.py b/google/cloud/aiplatform_v1beta1/types/dataset.py
index 8b93a04e1b..76f6462f40 100644
--- a/google/cloud/aiplatform_v1beta1/types/dataset.py
+++ b/google/cloud/aiplatform_v1beta1/types/dataset.py
@@ -25,11 +25,7 @@
 
 __protobuf__ = proto.module(
     package="google.cloud.aiplatform.v1beta1",
-    manifest={
-        "Dataset",
-        "ImportDataConfig",
-        "ExportDataConfig",
-    },
+    manifest={"Dataset", "ImportDataConfig", "ExportDataConfig",},
 )
 
 
@@ -92,23 +88,11 @@ class Dataset(proto.Message):
 
     metadata_schema_uri = proto.Field(proto.STRING, number=3)
 
-    metadata = proto.Field(
-        proto.MESSAGE,
-        number=8,
-        message=struct.Value,
-    )
+    metadata = proto.Field(proto.MESSAGE, number=8, message=struct.Value,)
 
-    create_time = proto.Field(
-        proto.MESSAGE,
-        number=4,
-        message=timestamp.Timestamp,
-    )
+    create_time = proto.Field(proto.MESSAGE, number=4, message=timestamp.Timestamp,)
 
-    update_time = proto.Field(
-        proto.MESSAGE,
-        number=5,
-        message=timestamp.Timestamp,
-    )
+    update_time = proto.Field(proto.MESSAGE, number=5, message=timestamp.Timestamp,)
 
     etag = proto.Field(proto.STRING, number=6)
 
@@ -148,10 +132,7 @@ class ImportDataConfig(proto.Message):
     """
 
     gcs_source = proto.Field(
-        proto.MESSAGE,
-        number=1,
-        oneof="source",
-        message=io.GcsSource,
+        proto.MESSAGE, number=1, oneof="source", message=io.GcsSource,
     )
 
     data_item_labels = proto.MapField(proto.STRING, proto.STRING, number=2)
@@ -185,10 +166,7 @@ class ExportDataConfig(proto.Message):
     """
 
     gcs_destination = proto.Field(
-        proto.MESSAGE,
-        number=1,
-        oneof="destination",
-        message=io.GcsDestination,
+        proto.MESSAGE, number=1, oneof="destination", message=io.GcsDestination,
     )
 
     annotations_filter = proto.Field(proto.STRING, number=2)
diff --git a/google/cloud/aiplatform_v1beta1/types/dataset_service.py b/google/cloud/aiplatform_v1beta1/types/dataset_service.py
index aebd5ebb31..7160b7b52f 100644
--- a/google/cloud/aiplatform_v1beta1/types/dataset_service.py
+++ b/google/cloud/aiplatform_v1beta1/types/dataset_service.py
@@ -65,11 +65,7 @@ class CreateDatasetRequest(proto.Message):
 
     parent = proto.Field(proto.STRING, number=1)
 
-    dataset = proto.Field(
-        proto.MESSAGE,
-        number=2,
-        message=gca_dataset.Dataset,
-    )
+    dataset = proto.Field(proto.MESSAGE, number=2, message=gca_dataset.Dataset,)
 
 
 class CreateDatasetOperationMetadata(proto.Message):
@@ -82,9 +78,7 @@ class CreateDatasetOperationMetadata(proto.Message):
     """
 
     generic_metadata = proto.Field(
-        proto.MESSAGE,
-        number=1,
-        message=operation.GenericOperationMetadata,
+        proto.MESSAGE, number=1, message=operation.GenericOperationMetadata,
     )
 
 
@@ -101,11 +95,7 @@ class GetDatasetRequest(proto.Message):
 
     name = proto.Field(proto.STRING, number=1)
 
-    read_mask = proto.Field(
-        proto.MESSAGE,
-        number=2,
-        message=field_mask.FieldMask,
-    )
+    read_mask = proto.Field(proto.MESSAGE, number=2, message=field_mask.FieldMask,)
 
 
 class UpdateDatasetRequest(proto.Message):
@@ -128,17 +118,9 @@ class UpdateDatasetRequest(proto.Message):
           -  ``labels``
     """
 
-    dataset = proto.Field(
-        proto.MESSAGE,
-        number=1,
-        message=gca_dataset.Dataset,
-    )
+    dataset = proto.Field(proto.MESSAGE, number=1, message=gca_dataset.Dataset,)
 
-    update_mask = proto.Field(
-        proto.MESSAGE,
-        number=2,
-        message=field_mask.FieldMask,
-    )
+    update_mask = proto.Field(proto.MESSAGE, number=2, message=field_mask.FieldMask,)
 
 
 class ListDatasetsRequest(proto.Message):
@@ -175,11 +157,7 @@ class ListDatasetsRequest(proto.Message):
 
     page_token = proto.Field(proto.STRING, number=4)
 
-    read_mask = proto.Field(
-        proto.MESSAGE,
-        number=5,
-        message=field_mask.FieldMask,
-    )
+    read_mask = proto.Field(proto.MESSAGE, number=5, message=field_mask.FieldMask,)
 
     order_by = proto.Field(proto.STRING, number=6)
 
@@ -201,9 +179,7 @@ def raw_page(self):
         return self
 
     datasets = proto.RepeatedField(
-        proto.MESSAGE,
-        number=1,
-        message=gca_dataset.Dataset,
+        proto.MESSAGE, number=1, message=gca_dataset.Dataset,
     )
 
     next_page_token = proto.Field(proto.STRING, number=2)
@@ -240,9 +216,7 @@ class ImportDataRequest(proto.Message):
 
     name = proto.Field(proto.STRING, number=1)
 
     import_configs = proto.RepeatedField(
-        proto.MESSAGE,
-        number=2,
-        message=gca_dataset.ImportDataConfig,
+        proto.MESSAGE, number=2, message=gca_dataset.ImportDataConfig,
     )
 
 
@@ -262,9 +236,7 @@ class ImportDataOperationMetadata(proto.Message):
     """
 
     generic_metadata = proto.Field(
-        proto.MESSAGE,
-        number=1,
-        message=operation.GenericOperationMetadata,
+        proto.MESSAGE, number=1, message=operation.GenericOperationMetadata,
     )
 
 
@@ -283,9 +255,7 @@ class ExportDataRequest(proto.Message):
 
     name = proto.Field(proto.STRING, number=1)
 
     export_config = proto.Field(
-        proto.MESSAGE,
-        number=2,
-        message=gca_dataset.ExportDataConfig,
+        proto.MESSAGE, number=2, message=gca_dataset.ExportDataConfig,
     )
 
 
@@ -316,9 +286,7 @@ class ExportDataOperationMetadata(proto.Message):
     """
 
     generic_metadata = proto.Field(
-        proto.MESSAGE,
-        number=1,
-        message=operation.GenericOperationMetadata,
+        proto.MESSAGE, number=1, message=operation.GenericOperationMetadata,
     )
 
     gcs_output_directory = proto.Field(proto.STRING, number=2)
@@ -355,11 +323,7 @@ class ListDataItemsRequest(proto.Message):
 
     page_token = proto.Field(proto.STRING, number=4)
 
-    read_mask = proto.Field(
-        proto.MESSAGE,
-        number=5,
-        message=field_mask.FieldMask,
-    )
+    read_mask = proto.Field(proto.MESSAGE, number=5, message=field_mask.FieldMask,)
 
     order_by = proto.Field(proto.STRING, number=6)
 
@@ -381,9 +345,7 @@ def raw_page(self):
         return self
 
     data_items = proto.RepeatedField(
-        proto.MESSAGE,
-        number=1,
-        message=data_item.DataItem,
+        proto.MESSAGE, number=1, message=data_item.DataItem,
     )
 
     next_page_token = proto.Field(proto.STRING, number=2)
@@ -404,11 +366,7 @@ class GetAnnotationSpecRequest(proto.Message):
 
     name = proto.Field(proto.STRING, number=1)
 
-    read_mask = proto.Field(
-        proto.MESSAGE,
-        number=2,
-        message=field_mask.FieldMask,
-    )
+    read_mask = proto.Field(proto.MESSAGE, number=2, message=field_mask.FieldMask,)
 
 
 class ListAnnotationsRequest(proto.Message):
@@ -443,11 +401,7 @@ class ListAnnotationsRequest(proto.Message):
 
     page_token = proto.Field(proto.STRING, number=4)
 
-    read_mask = proto.Field(
-        proto.MESSAGE,
-        number=5,
-        message=field_mask.FieldMask,
-    )
+    read_mask = proto.Field(proto.MESSAGE, number=5, message=field_mask.FieldMask,)
 
     order_by = proto.Field(proto.STRING, number=6)
 
@@ -469,9 +423,7 @@ def raw_page(self):
         return self
 
     annotations = proto.RepeatedField(
-        proto.MESSAGE,
-        number=1,
-        message=annotation.Annotation,
+        proto.MESSAGE, number=1, message=annotation.Annotation,
     )
 
     next_page_token = proto.Field(proto.STRING, number=2)
diff --git
a/google/cloud/aiplatform_v1beta1/types/deployed_model_ref.py b/google/cloud/aiplatform_v1beta1/types/deployed_model_ref.py index f94dc7793a..b0ec7010a2 100644 --- a/google/cloud/aiplatform_v1beta1/types/deployed_model_ref.py +++ b/google/cloud/aiplatform_v1beta1/types/deployed_model_ref.py @@ -19,10 +19,7 @@ __protobuf__ = proto.module( - package="google.cloud.aiplatform.v1beta1", - manifest={ - "DeployedModelRef", - }, + package="google.cloud.aiplatform.v1beta1", manifest={"DeployedModelRef",}, ) diff --git a/google/cloud/aiplatform_v1beta1/types/endpoint.py b/google/cloud/aiplatform_v1beta1/types/endpoint.py index 326ca3c35c..f1ba6ed85d 100644 --- a/google/cloud/aiplatform_v1beta1/types/endpoint.py +++ b/google/cloud/aiplatform_v1beta1/types/endpoint.py @@ -24,11 +24,7 @@ __protobuf__ = proto.module( - package="google.cloud.aiplatform.v1beta1", - manifest={ - "Endpoint", - "DeployedModel", - }, + package="google.cloud.aiplatform.v1beta1", manifest={"Endpoint", "DeployedModel",}, ) @@ -92,9 +88,7 @@ class Endpoint(proto.Message): description = proto.Field(proto.STRING, number=3) deployed_models = proto.RepeatedField( - proto.MESSAGE, - number=4, - message="DeployedModel", + proto.MESSAGE, number=4, message="DeployedModel", ) traffic_split = proto.MapField(proto.STRING, proto.INT32, number=5) @@ -103,17 +97,9 @@ class Endpoint(proto.Message): labels = proto.MapField(proto.STRING, proto.STRING, number=7) - create_time = proto.Field( - proto.MESSAGE, - number=8, - message=timestamp.Timestamp, - ) + create_time = proto.Field(proto.MESSAGE, number=8, message=timestamp.Timestamp,) - update_time = proto.Field( - proto.MESSAGE, - number=9, - message=timestamp.Timestamp, - ) + update_time = proto.Field(proto.MESSAGE, number=9, message=timestamp.Timestamp,) class DeployedModel(proto.Message): @@ -207,16 +193,10 @@ class DeployedModel(proto.Message): display_name = proto.Field(proto.STRING, number=3) - create_time = proto.Field( - proto.MESSAGE, - number=6, - message=timestamp.Timestamp, - ) + create_time = proto.Field(proto.MESSAGE, number=6, message=timestamp.Timestamp,) explanation_spec = proto.Field( - proto.MESSAGE, - number=9, - message=explanation.ExplanationSpec, + proto.MESSAGE, number=9, message=explanation.ExplanationSpec, ) service_account = proto.Field(proto.STRING, number=11) diff --git a/google/cloud/aiplatform_v1beta1/types/endpoint_service.py b/google/cloud/aiplatform_v1beta1/types/endpoint_service.py index 659268cd22..4bc9f35594 100644 --- a/google/cloud/aiplatform_v1beta1/types/endpoint_service.py +++ b/google/cloud/aiplatform_v1beta1/types/endpoint_service.py @@ -58,11 +58,7 @@ class CreateEndpointRequest(proto.Message): parent = proto.Field(proto.STRING, number=1) - endpoint = proto.Field( - proto.MESSAGE, - number=2, - message=gca_endpoint.Endpoint, - ) + endpoint = proto.Field(proto.MESSAGE, number=2, message=gca_endpoint.Endpoint,) class CreateEndpointOperationMetadata(proto.Message): @@ -75,9 +71,7 @@ class CreateEndpointOperationMetadata(proto.Message): """ generic_metadata = proto.Field( - proto.MESSAGE, - number=1, - message=operation.GenericOperationMetadata, + proto.MESSAGE, number=1, message=operation.GenericOperationMetadata, ) @@ -149,11 +143,7 @@ class ListEndpointsRequest(proto.Message): page_token = proto.Field(proto.STRING, number=4) - read_mask = proto.Field( - proto.MESSAGE, - number=5, - message=field_mask.FieldMask, - ) + read_mask = proto.Field(proto.MESSAGE, number=5, message=field_mask.FieldMask,) class ListEndpointsResponse(proto.Message): @@ -174,9 
+164,7 @@ def raw_page(self): return self endpoints = proto.RepeatedField( - proto.MESSAGE, - number=1, - message=gca_endpoint.Endpoint, + proto.MESSAGE, number=1, message=gca_endpoint.Endpoint, ) next_page_token = proto.Field(proto.STRING, number=2) @@ -195,17 +183,9 @@ class UpdateEndpointRequest(proto.Message): resource. """ - endpoint = proto.Field( - proto.MESSAGE, - number=1, - message=gca_endpoint.Endpoint, - ) + endpoint = proto.Field(proto.MESSAGE, number=1, message=gca_endpoint.Endpoint,) - update_mask = proto.Field( - proto.MESSAGE, - number=2, - message=field_mask.FieldMask, - ) + update_mask = proto.Field(proto.MESSAGE, number=2, message=field_mask.FieldMask,) class DeleteEndpointRequest(proto.Message): @@ -259,9 +239,7 @@ class DeployModelRequest(proto.Message): endpoint = proto.Field(proto.STRING, number=1) deployed_model = proto.Field( - proto.MESSAGE, - number=2, - message=gca_endpoint.DeployedModel, + proto.MESSAGE, number=2, message=gca_endpoint.DeployedModel, ) traffic_split = proto.MapField(proto.STRING, proto.INT32, number=3) @@ -278,9 +256,7 @@ class DeployModelResponse(proto.Message): """ deployed_model = proto.Field( - proto.MESSAGE, - number=1, - message=gca_endpoint.DeployedModel, + proto.MESSAGE, number=1, message=gca_endpoint.DeployedModel, ) @@ -294,9 +270,7 @@ class DeployModelOperationMetadata(proto.Message): """ generic_metadata = proto.Field( - proto.MESSAGE, - number=1, - message=operation.GenericOperationMetadata, + proto.MESSAGE, number=1, message=operation.GenericOperationMetadata, ) @@ -346,9 +320,7 @@ class UndeployModelOperationMetadata(proto.Message): """ generic_metadata = proto.Field( - proto.MESSAGE, - number=1, - message=operation.GenericOperationMetadata, + proto.MESSAGE, number=1, message=operation.GenericOperationMetadata, ) diff --git a/google/cloud/aiplatform_v1beta1/types/env_var.py b/google/cloud/aiplatform_v1beta1/types/env_var.py index 74b460116d..207e8275cd 100644 --- a/google/cloud/aiplatform_v1beta1/types/env_var.py +++ b/google/cloud/aiplatform_v1beta1/types/env_var.py @@ -19,10 +19,7 @@ __protobuf__ = proto.module( - package="google.cloud.aiplatform.v1beta1", - manifest={ - "EnvVar", - }, + package="google.cloud.aiplatform.v1beta1", manifest={"EnvVar",}, ) diff --git a/google/cloud/aiplatform_v1beta1/types/explanation.py b/google/cloud/aiplatform_v1beta1/types/explanation.py index 4b7c14c490..7a495fff1e 100644 --- a/google/cloud/aiplatform_v1beta1/types/explanation.py +++ b/google/cloud/aiplatform_v1beta1/types/explanation.py @@ -71,11 +71,7 @@ class Explanation(proto.Message): in the same order as they appear in the output_indices. """ - attributions = proto.RepeatedField( - proto.MESSAGE, - number=1, - message="Attribution", - ) + attributions = proto.RepeatedField(proto.MESSAGE, number=1, message="Attribution",) class ModelExplanation(proto.Message): @@ -113,9 +109,7 @@ class ModelExplanation(proto.Message): """ mean_attributions = proto.RepeatedField( - proto.MESSAGE, - number=1, - message="Attribution", + proto.MESSAGE, number=1, message="Attribution", ) @@ -239,11 +233,7 @@ class Attribution(proto.Message): instance_output_value = proto.Field(proto.DOUBLE, number=2) - feature_attributions = proto.Field( - proto.MESSAGE, - number=3, - message=struct.Value, - ) + feature_attributions = proto.Field(proto.MESSAGE, number=3, message=struct.Value,) output_index = proto.RepeatedField(proto.INT32, number=4) @@ -266,16 +256,10 @@ class ExplanationSpec(proto.Message): input and output for explanation. 
""" - parameters = proto.Field( - proto.MESSAGE, - number=1, - message="ExplanationParameters", - ) + parameters = proto.Field(proto.MESSAGE, number=1, message="ExplanationParameters",) metadata = proto.Field( - proto.MESSAGE, - number=2, - message=explanation_metadata.ExplanationMetadata, + proto.MESSAGE, number=2, message=explanation_metadata.ExplanationMetadata, ) @@ -333,10 +317,7 @@ class ExplanationParameters(proto.Message): """ sampled_shapley_attribution = proto.Field( - proto.MESSAGE, - number=1, - oneof="method", - message="SampledShapleyAttribution", + proto.MESSAGE, number=1, oneof="method", message="SampledShapleyAttribution", ) integrated_gradients_attribution = proto.Field( @@ -347,19 +328,12 @@ class ExplanationParameters(proto.Message): ) xrai_attribution = proto.Field( - proto.MESSAGE, - number=3, - oneof="method", - message="XraiAttribution", + proto.MESSAGE, number=3, oneof="method", message="XraiAttribution", ) top_k = proto.Field(proto.INT32, number=4) - output_indices = proto.Field( - proto.MESSAGE, - number=5, - message=struct.ListValue, - ) + output_indices = proto.Field(proto.MESSAGE, number=5, message=struct.ListValue,) class SampledShapleyAttribution(proto.Message): @@ -407,9 +381,7 @@ class IntegratedGradientsAttribution(proto.Message): step_count = proto.Field(proto.INT32, number=1) smooth_grad_config = proto.Field( - proto.MESSAGE, - number=2, - message="SmoothGradConfig", + proto.MESSAGE, number=2, message="SmoothGradConfig", ) @@ -444,9 +416,7 @@ class XraiAttribution(proto.Message): step_count = proto.Field(proto.INT32, number=1) smooth_grad_config = proto.Field( - proto.MESSAGE, - number=2, - message="SmoothGradConfig", + proto.MESSAGE, number=2, message="SmoothGradConfig", ) @@ -538,9 +508,7 @@ class NoiseSigmaForFeature(proto.Message): sigma = proto.Field(proto.FLOAT, number=2) noise_sigma = proto.RepeatedField( - proto.MESSAGE, - number=1, - message=NoiseSigmaForFeature, + proto.MESSAGE, number=1, message=NoiseSigmaForFeature, ) diff --git a/google/cloud/aiplatform_v1beta1/types/explanation_metadata.py b/google/cloud/aiplatform_v1beta1/types/explanation_metadata.py index 78c46d1dd0..7261c064f8 100644 --- a/google/cloud/aiplatform_v1beta1/types/explanation_metadata.py +++ b/google/cloud/aiplatform_v1beta1/types/explanation_metadata.py @@ -22,10 +22,7 @@ __protobuf__ = proto.module( - package="google.cloud.aiplatform.v1beta1", - manifest={ - "ExplanationMetadata", - }, + package="google.cloud.aiplatform.v1beta1", manifest={"ExplanationMetadata",}, ) @@ -319,17 +316,13 @@ class OverlayType(proto.Enum): ) input_baselines = proto.RepeatedField( - proto.MESSAGE, - number=1, - message=struct.Value, + proto.MESSAGE, number=1, message=struct.Value, ) input_tensor_name = proto.Field(proto.STRING, number=2) encoding = proto.Field( - proto.ENUM, - number=3, - enum="ExplanationMetadata.InputMetadata.Encoding", + proto.ENUM, number=3, enum="ExplanationMetadata.InputMetadata.Encoding", ) modality = proto.Field(proto.STRING, number=4) @@ -349,9 +342,7 @@ class OverlayType(proto.Enum): encoded_tensor_name = proto.Field(proto.STRING, number=9) encoded_baselines = proto.RepeatedField( - proto.MESSAGE, - number=10, - message=struct.Value, + proto.MESSAGE, number=10, message=struct.Value, ) visualization = proto.Field( @@ -400,10 +391,7 @@ class OutputMetadata(proto.Message): """ index_display_name_mapping = proto.Field( - proto.MESSAGE, - number=1, - oneof="display_name_mapping", - message=struct.Value, + proto.MESSAGE, number=1, oneof="display_name_mapping", 
message=struct.Value, ) display_name_mapping_key = proto.Field( @@ -413,17 +401,11 @@ class OutputMetadata(proto.Message): output_tensor_name = proto.Field(proto.STRING, number=3) inputs = proto.MapField( - proto.STRING, - proto.MESSAGE, - number=1, - message=InputMetadata, + proto.STRING, proto.MESSAGE, number=1, message=InputMetadata, ) outputs = proto.MapField( - proto.STRING, - proto.MESSAGE, - number=2, - message=OutputMetadata, + proto.STRING, proto.MESSAGE, number=2, message=OutputMetadata, ) feature_attributions_schema_uri = proto.Field(proto.STRING, number=3) diff --git a/google/cloud/aiplatform_v1beta1/types/hyperparameter_tuning_job.py b/google/cloud/aiplatform_v1beta1/types/hyperparameter_tuning_job.py index 186963683c..78af635e79 100644 --- a/google/cloud/aiplatform_v1beta1/types/hyperparameter_tuning_job.py +++ b/google/cloud/aiplatform_v1beta1/types/hyperparameter_tuning_job.py @@ -26,10 +26,7 @@ __protobuf__ = proto.module( - package="google.cloud.aiplatform.v1beta1", - manifest={ - "HyperparameterTuningJob", - }, + package="google.cloud.aiplatform.v1beta1", manifest={"HyperparameterTuningJob",}, ) @@ -102,11 +99,7 @@ class HyperparameterTuningJob(proto.Message): display_name = proto.Field(proto.STRING, number=2) - study_spec = proto.Field( - proto.MESSAGE, - number=4, - message=study.StudySpec, - ) + study_spec = proto.Field(proto.MESSAGE, number=4, message=study.StudySpec,) max_trial_count = proto.Field(proto.INT32, number=5) @@ -115,52 +108,22 @@ class HyperparameterTuningJob(proto.Message): max_failed_trial_count = proto.Field(proto.INT32, number=7) trial_job_spec = proto.Field( - proto.MESSAGE, - number=8, - message=custom_job.CustomJobSpec, + proto.MESSAGE, number=8, message=custom_job.CustomJobSpec, ) - trials = proto.RepeatedField( - proto.MESSAGE, - number=9, - message=study.Trial, - ) + trials = proto.RepeatedField(proto.MESSAGE, number=9, message=study.Trial,) - state = proto.Field( - proto.ENUM, - number=10, - enum=job_state.JobState, - ) + state = proto.Field(proto.ENUM, number=10, enum=job_state.JobState,) - create_time = proto.Field( - proto.MESSAGE, - number=11, - message=timestamp.Timestamp, - ) + create_time = proto.Field(proto.MESSAGE, number=11, message=timestamp.Timestamp,) - start_time = proto.Field( - proto.MESSAGE, - number=12, - message=timestamp.Timestamp, - ) + start_time = proto.Field(proto.MESSAGE, number=12, message=timestamp.Timestamp,) - end_time = proto.Field( - proto.MESSAGE, - number=13, - message=timestamp.Timestamp, - ) + end_time = proto.Field(proto.MESSAGE, number=13, message=timestamp.Timestamp,) - update_time = proto.Field( - proto.MESSAGE, - number=14, - message=timestamp.Timestamp, - ) + update_time = proto.Field(proto.MESSAGE, number=14, message=timestamp.Timestamp,) - error = proto.Field( - proto.MESSAGE, - number=15, - message=status.Status, - ) + error = proto.Field(proto.MESSAGE, number=15, message=status.Status,) labels = proto.MapField(proto.STRING, proto.STRING, number=16) diff --git a/google/cloud/aiplatform_v1beta1/types/job_service.py b/google/cloud/aiplatform_v1beta1/types/job_service.py index 9962e81c40..f64f07cbe3 100644 --- a/google/cloud/aiplatform_v1beta1/types/job_service.py +++ b/google/cloud/aiplatform_v1beta1/types/job_service.py @@ -77,11 +77,7 @@ class CreateCustomJobRequest(proto.Message): parent = proto.Field(proto.STRING, number=1) - custom_job = proto.Field( - proto.MESSAGE, - number=2, - message=gca_custom_job.CustomJob, - ) + custom_job = proto.Field(proto.MESSAGE, number=2, 
message=gca_custom_job.CustomJob,) class GetCustomJobRequest(proto.Message): @@ -144,11 +140,7 @@ class ListCustomJobsRequest(proto.Message): page_token = proto.Field(proto.STRING, number=4) - read_mask = proto.Field( - proto.MESSAGE, - number=5, - message=field_mask.FieldMask, - ) + read_mask = proto.Field(proto.MESSAGE, number=5, message=field_mask.FieldMask,) class ListCustomJobsResponse(proto.Message): @@ -169,9 +161,7 @@ def raw_page(self): return self custom_jobs = proto.RepeatedField( - proto.MESSAGE, - number=1, - message=gca_custom_job.CustomJob, + proto.MESSAGE, number=1, message=gca_custom_job.CustomJob, ) next_page_token = proto.Field(proto.STRING, number=2) @@ -219,9 +209,7 @@ class CreateDataLabelingJobRequest(proto.Message): parent = proto.Field(proto.STRING, number=1) data_labeling_job = proto.Field( - proto.MESSAGE, - number=2, - message=gca_data_labeling_job.DataLabelingJob, + proto.MESSAGE, number=2, message=gca_data_labeling_job.DataLabelingJob, ) @@ -287,11 +275,7 @@ class ListDataLabelingJobsRequest(proto.Message): page_token = proto.Field(proto.STRING, number=4) - read_mask = proto.Field( - proto.MESSAGE, - number=5, - message=field_mask.FieldMask, - ) + read_mask = proto.Field(proto.MESSAGE, number=5, message=field_mask.FieldMask,) order_by = proto.Field(proto.STRING, number=6) @@ -313,9 +297,7 @@ def raw_page(self): return self data_labeling_jobs = proto.RepeatedField( - proto.MESSAGE, - number=1, - message=gca_data_labeling_job.DataLabelingJob, + proto.MESSAGE, number=1, message=gca_data_labeling_job.DataLabelingJob, ) next_page_token = proto.Field(proto.STRING, number=2) @@ -435,11 +417,7 @@ class ListHyperparameterTuningJobsRequest(proto.Message): page_token = proto.Field(proto.STRING, number=4) - read_mask = proto.Field( - proto.MESSAGE, - number=5, - message=field_mask.FieldMask, - ) + read_mask = proto.Field(proto.MESSAGE, number=5, message=field_mask.FieldMask,) class ListHyperparameterTuningJobsResponse(proto.Message): @@ -516,9 +494,7 @@ class CreateBatchPredictionJobRequest(proto.Message): parent = proto.Field(proto.STRING, number=1) batch_prediction_job = proto.Field( - proto.MESSAGE, - number=2, - message=gca_batch_prediction_job.BatchPredictionJob, + proto.MESSAGE, number=2, message=gca_batch_prediction_job.BatchPredictionJob, ) @@ -584,11 +560,7 @@ class ListBatchPredictionJobsRequest(proto.Message): page_token = proto.Field(proto.STRING, number=4) - read_mask = proto.Field( - proto.MESSAGE, - number=5, - message=field_mask.FieldMask, - ) + read_mask = proto.Field(proto.MESSAGE, number=5, message=field_mask.FieldMask,) class ListBatchPredictionJobsResponse(proto.Message): @@ -610,9 +582,7 @@ def raw_page(self): return self batch_prediction_jobs = proto.RepeatedField( - proto.MESSAGE, - number=1, - message=gca_batch_prediction_job.BatchPredictionJob, + proto.MESSAGE, number=1, message=gca_batch_prediction_job.BatchPredictionJob, ) next_page_token = proto.Field(proto.STRING, number=2) diff --git a/google/cloud/aiplatform_v1beta1/types/job_state.py b/google/cloud/aiplatform_v1beta1/types/job_state.py index 2baf9e447d..f86e179b1b 100644 --- a/google/cloud/aiplatform_v1beta1/types/job_state.py +++ b/google/cloud/aiplatform_v1beta1/types/job_state.py @@ -19,10 +19,7 @@ __protobuf__ = proto.module( - package="google.cloud.aiplatform.v1beta1", - manifest={ - "JobState", - }, + package="google.cloud.aiplatform.v1beta1", manifest={"JobState",}, ) diff --git a/google/cloud/aiplatform_v1beta1/types/machine_resources.py 
b/google/cloud/aiplatform_v1beta1/types/machine_resources.py index eefaa7240e..c71aca024e 100644 --- a/google/cloud/aiplatform_v1beta1/types/machine_resources.py +++ b/google/cloud/aiplatform_v1beta1/types/machine_resources.py @@ -92,9 +92,7 @@ class MachineSpec(proto.Message): machine_type = proto.Field(proto.STRING, number=1) accelerator_type = proto.Field( - proto.ENUM, - number=2, - enum=gca_accelerator_type.AcceleratorType, + proto.ENUM, number=2, enum=gca_accelerator_type.AcceleratorType, ) accelerator_count = proto.Field(proto.INT32, number=3) @@ -133,11 +131,7 @@ class DedicatedResources(proto.Message): as the default value. """ - machine_spec = proto.Field( - proto.MESSAGE, - number=1, - message="MachineSpec", - ) + machine_spec = proto.Field(proto.MESSAGE, number=1, message="MachineSpec",) min_replica_count = proto.Field(proto.INT32, number=2) @@ -201,11 +195,7 @@ class BatchDedicatedResources(proto.Message): The default value is 10. """ - machine_spec = proto.Field( - proto.MESSAGE, - number=1, - message="MachineSpec", - ) + machine_spec = proto.Field(proto.MESSAGE, number=1, message="MachineSpec",) starting_replica_count = proto.Field(proto.INT32, number=2) diff --git a/google/cloud/aiplatform_v1beta1/types/manual_batch_tuning_parameters.py b/google/cloud/aiplatform_v1beta1/types/manual_batch_tuning_parameters.py index 849c24b16c..7a467d5069 100644 --- a/google/cloud/aiplatform_v1beta1/types/manual_batch_tuning_parameters.py +++ b/google/cloud/aiplatform_v1beta1/types/manual_batch_tuning_parameters.py @@ -20,9 +20,7 @@ __protobuf__ = proto.module( package="google.cloud.aiplatform.v1beta1", - manifest={ - "ManualBatchTuningParameters", - }, + manifest={"ManualBatchTuningParameters",}, ) diff --git a/google/cloud/aiplatform_v1beta1/types/migratable_resource.py b/google/cloud/aiplatform_v1beta1/types/migratable_resource.py index 689994f38e..99a6e65a42 100644 --- a/google/cloud/aiplatform_v1beta1/types/migratable_resource.py +++ b/google/cloud/aiplatform_v1beta1/types/migratable_resource.py @@ -22,10 +22,7 @@ __protobuf__ = proto.module( - package="google.cloud.aiplatform.v1beta1", - manifest={ - "MigratableResource", - }, + package="google.cloud.aiplatform.v1beta1", manifest={"MigratableResource",}, ) @@ -155,43 +152,27 @@ class DataLabelingAnnotatedDataset(proto.Message): ) ml_engine_model_version = proto.Field( - proto.MESSAGE, - number=1, - oneof="resource", - message=MlEngineModelVersion, + proto.MESSAGE, number=1, oneof="resource", message=MlEngineModelVersion, ) automl_model = proto.Field( - proto.MESSAGE, - number=2, - oneof="resource", - message=AutomlModel, + proto.MESSAGE, number=2, oneof="resource", message=AutomlModel, ) automl_dataset = proto.Field( - proto.MESSAGE, - number=3, - oneof="resource", - message=AutomlDataset, + proto.MESSAGE, number=3, oneof="resource", message=AutomlDataset, ) data_labeling_dataset = proto.Field( - proto.MESSAGE, - number=4, - oneof="resource", - message=DataLabelingDataset, + proto.MESSAGE, number=4, oneof="resource", message=DataLabelingDataset, ) last_migrate_time = proto.Field( - proto.MESSAGE, - number=5, - message=timestamp.Timestamp, + proto.MESSAGE, number=5, message=timestamp.Timestamp, ) last_update_time = proto.Field( - proto.MESSAGE, - number=6, - message=timestamp.Timestamp, + proto.MESSAGE, number=6, message=timestamp.Timestamp, ) diff --git a/google/cloud/aiplatform_v1beta1/types/migration_service.py b/google/cloud/aiplatform_v1beta1/types/migration_service.py index cd31a3283c..46b0cdc66b 100644 --- 
a/google/cloud/aiplatform_v1beta1/types/migration_service.py +++ b/google/cloud/aiplatform_v1beta1/types/migration_service.py @@ -82,9 +82,7 @@ def raw_page(self): return self migratable_resources = proto.RepeatedField( - proto.MESSAGE, - number=1, - message=gca_migratable_resource.MigratableResource, + proto.MESSAGE, number=1, message=gca_migratable_resource.MigratableResource, ) next_page_token = proto.Field(proto.STRING, number=2) @@ -108,9 +106,7 @@ class BatchMigrateResourcesRequest(proto.Message): parent = proto.Field(proto.STRING, number=1) migrate_resource_requests = proto.RepeatedField( - proto.MESSAGE, - number=2, - message="MigrateResourceRequest", + proto.MESSAGE, number=2, message="MigrateResourceRequest", ) @@ -255,17 +251,11 @@ class MigrateDataLabelingAnnotatedDatasetConfig(proto.Message): ) migrate_automl_model_config = proto.Field( - proto.MESSAGE, - number=2, - oneof="request", - message=MigrateAutomlModelConfig, + proto.MESSAGE, number=2, oneof="request", message=MigrateAutomlModelConfig, ) migrate_automl_dataset_config = proto.Field( - proto.MESSAGE, - number=3, - oneof="request", - message=MigrateAutomlDatasetConfig, + proto.MESSAGE, number=3, oneof="request", message=MigrateAutomlDatasetConfig, ) migrate_data_labeling_dataset_config = proto.Field( @@ -286,9 +276,7 @@ class BatchMigrateResourcesResponse(proto.Message): """ migrate_resource_responses = proto.RepeatedField( - proto.MESSAGE, - number=1, - message="MigrateResourceResponse", + proto.MESSAGE, number=1, message="MigrateResourceResponse", ) @@ -311,9 +299,7 @@ class MigrateResourceResponse(proto.Message): model = proto.Field(proto.STRING, number=2, oneof="migrated_resource") migratable_resource = proto.Field( - proto.MESSAGE, - number=3, - message=gca_migratable_resource.MigratableResource, + proto.MESSAGE, number=3, message=gca_migratable_resource.MigratableResource, ) @@ -327,9 +313,7 @@ class BatchMigrateResourcesOperationMetadata(proto.Message): """ generic_metadata = proto.Field( - proto.MESSAGE, - number=1, - message=operation.GenericOperationMetadata, + proto.MESSAGE, number=1, message=operation.GenericOperationMetadata, ) diff --git a/google/cloud/aiplatform_v1beta1/types/model.py b/google/cloud/aiplatform_v1beta1/types/model.py index 08528748dc..21e8c41034 100644 --- a/google/cloud/aiplatform_v1beta1/types/model.py +++ b/google/cloud/aiplatform_v1beta1/types/model.py @@ -27,12 +27,7 @@ __protobuf__ = proto.module( package="google.cloud.aiplatform.v1beta1", - manifest={ - "Model", - "PredictSchemata", - "ModelContainerSpec", - "Port", - }, + manifest={"Model", "PredictSchemata", "ModelContainerSpec", "Port",}, ) @@ -279,9 +274,7 @@ class ExportableContent(proto.Enum): id = proto.Field(proto.STRING, number=1) exportable_contents = proto.RepeatedField( - proto.ENUM, - number=2, - enum="Model.ExportFormat.ExportableContent", + proto.ENUM, number=2, enum="Model.ExportFormat.ExportableContent", ) name = proto.Field(proto.STRING, number=1) @@ -290,68 +283,40 @@ class ExportableContent(proto.Enum): description = proto.Field(proto.STRING, number=3) - predict_schemata = proto.Field( - proto.MESSAGE, - number=4, - message="PredictSchemata", - ) + predict_schemata = proto.Field(proto.MESSAGE, number=4, message="PredictSchemata",) metadata_schema_uri = proto.Field(proto.STRING, number=5) - metadata = proto.Field( - proto.MESSAGE, - number=6, - message=struct.Value, - ) + metadata = proto.Field(proto.MESSAGE, number=6, message=struct.Value,) supported_export_formats = proto.RepeatedField( - proto.MESSAGE, - 
number=20, - message=ExportFormat, + proto.MESSAGE, number=20, message=ExportFormat, ) training_pipeline = proto.Field(proto.STRING, number=7) - container_spec = proto.Field( - proto.MESSAGE, - number=9, - message="ModelContainerSpec", - ) + container_spec = proto.Field(proto.MESSAGE, number=9, message="ModelContainerSpec",) artifact_uri = proto.Field(proto.STRING, number=26) supported_deployment_resources_types = proto.RepeatedField( - proto.ENUM, - number=10, - enum=DeploymentResourcesType, + proto.ENUM, number=10, enum=DeploymentResourcesType, ) supported_input_storage_formats = proto.RepeatedField(proto.STRING, number=11) supported_output_storage_formats = proto.RepeatedField(proto.STRING, number=12) - create_time = proto.Field( - proto.MESSAGE, - number=13, - message=timestamp.Timestamp, - ) + create_time = proto.Field(proto.MESSAGE, number=13, message=timestamp.Timestamp,) - update_time = proto.Field( - proto.MESSAGE, - number=14, - message=timestamp.Timestamp, - ) + update_time = proto.Field(proto.MESSAGE, number=14, message=timestamp.Timestamp,) deployed_models = proto.RepeatedField( - proto.MESSAGE, - number=15, - message=deployed_model_ref.DeployedModelRef, + proto.MESSAGE, number=15, message=deployed_model_ref.DeployedModelRef, ) explanation_spec = proto.Field( - proto.MESSAGE, - number=23, - message=explanation.ExplanationSpec, + proto.MESSAGE, number=23, message=explanation.ExplanationSpec, ) etag = proto.Field(proto.STRING, number=16) @@ -658,17 +623,9 @@ class ModelContainerSpec(proto.Message): args = proto.RepeatedField(proto.STRING, number=3) - env = proto.RepeatedField( - proto.MESSAGE, - number=4, - message=env_var.EnvVar, - ) + env = proto.RepeatedField(proto.MESSAGE, number=4, message=env_var.EnvVar,) - ports = proto.RepeatedField( - proto.MESSAGE, - number=5, - message="Port", - ) + ports = proto.RepeatedField(proto.MESSAGE, number=5, message="Port",) predict_route = proto.Field(proto.STRING, number=6) diff --git a/google/cloud/aiplatform_v1beta1/types/model_evaluation.py b/google/cloud/aiplatform_v1beta1/types/model_evaluation.py index 7a55d1e7fc..b768ed978e 100644 --- a/google/cloud/aiplatform_v1beta1/types/model_evaluation.py +++ b/google/cloud/aiplatform_v1beta1/types/model_evaluation.py @@ -24,10 +24,7 @@ __protobuf__ = proto.module( - package="google.cloud.aiplatform.v1beta1", - manifest={ - "ModelEvaluation", - }, + package="google.cloud.aiplatform.v1beta1", manifest={"ModelEvaluation",}, ) @@ -74,24 +71,14 @@ class ModelEvaluation(proto.Message): metrics_schema_uri = proto.Field(proto.STRING, number=2) - metrics = proto.Field( - proto.MESSAGE, - number=3, - message=struct.Value, - ) + metrics = proto.Field(proto.MESSAGE, number=3, message=struct.Value,) - create_time = proto.Field( - proto.MESSAGE, - number=4, - message=timestamp.Timestamp, - ) + create_time = proto.Field(proto.MESSAGE, number=4, message=timestamp.Timestamp,) slice_dimensions = proto.RepeatedField(proto.STRING, number=5) model_explanation = proto.Field( - proto.MESSAGE, - number=8, - message=explanation.ModelExplanation, + proto.MESSAGE, number=8, message=explanation.ModelExplanation, ) diff --git a/google/cloud/aiplatform_v1beta1/types/model_evaluation_slice.py b/google/cloud/aiplatform_v1beta1/types/model_evaluation_slice.py index af37ef736c..1039d32c1f 100644 --- a/google/cloud/aiplatform_v1beta1/types/model_evaluation_slice.py +++ b/google/cloud/aiplatform_v1beta1/types/model_evaluation_slice.py @@ -23,10 +23,7 @@ __protobuf__ = proto.module( - package="google.cloud.aiplatform.v1beta1", - 
manifest={ - "ModelEvaluationSlice", - }, + package="google.cloud.aiplatform.v1beta1", manifest={"ModelEvaluationSlice",}, ) @@ -82,25 +79,13 @@ class Slice(proto.Message): name = proto.Field(proto.STRING, number=1) - slice_ = proto.Field( - proto.MESSAGE, - number=2, - message=Slice, - ) + slice_ = proto.Field(proto.MESSAGE, number=2, message=Slice,) metrics_schema_uri = proto.Field(proto.STRING, number=3) - metrics = proto.Field( - proto.MESSAGE, - number=4, - message=struct.Value, - ) - - create_time = proto.Field( - proto.MESSAGE, - number=5, - message=timestamp.Timestamp, - ) + metrics = proto.Field(proto.MESSAGE, number=4, message=struct.Value,) + + create_time = proto.Field(proto.MESSAGE, number=5, message=timestamp.Timestamp,) __all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/google/cloud/aiplatform_v1beta1/types/model_service.py b/google/cloud/aiplatform_v1beta1/types/model_service.py index 4b783e7fa8..3cfb17ad2c 100644 --- a/google/cloud/aiplatform_v1beta1/types/model_service.py +++ b/google/cloud/aiplatform_v1beta1/types/model_service.py @@ -65,11 +65,7 @@ class UploadModelRequest(proto.Message): parent = proto.Field(proto.STRING, number=1) - model = proto.Field( - proto.MESSAGE, - number=2, - message=gca_model.Model, - ) + model = proto.Field(proto.MESSAGE, number=2, message=gca_model.Model,) class UploadModelOperationMetadata(proto.Message): @@ -83,9 +79,7 @@ class UploadModelOperationMetadata(proto.Message): """ generic_metadata = proto.Field( - proto.MESSAGE, - number=1, - message=operation.GenericOperationMetadata, + proto.MESSAGE, number=1, message=operation.GenericOperationMetadata, ) @@ -147,11 +141,7 @@ class ListModelsRequest(proto.Message): page_token = proto.Field(proto.STRING, number=4) - read_mask = proto.Field( - proto.MESSAGE, - number=5, - message=field_mask.FieldMask, - ) + read_mask = proto.Field(proto.MESSAGE, number=5, message=field_mask.FieldMask,) class ListModelsResponse(proto.Message): @@ -171,11 +161,7 @@ class ListModelsResponse(proto.Message): def raw_page(self): return self - models = proto.RepeatedField( - proto.MESSAGE, - number=1, - message=gca_model.Model, - ) + models = proto.RepeatedField(proto.MESSAGE, number=1, message=gca_model.Model,) next_page_token = proto.Field(proto.STRING, number=2) @@ -195,17 +181,9 @@ class UpdateModelRequest(proto.Message): [FieldMask](https://developers.google.com/protocol-buffers/docs/reference/google.protobuf#fieldmask). 
""" - model = proto.Field( - proto.MESSAGE, - number=1, - message=gca_model.Model, - ) + model = proto.Field(proto.MESSAGE, number=1, message=gca_model.Model,) - update_mask = proto.Field( - proto.MESSAGE, - number=2, - message=field_mask.FieldMask, - ) + update_mask = proto.Field(proto.MESSAGE, number=2, message=field_mask.FieldMask,) class DeleteModelRequest(proto.Message): @@ -267,24 +245,16 @@ class OutputConfig(proto.Message): export_format_id = proto.Field(proto.STRING, number=1) artifact_destination = proto.Field( - proto.MESSAGE, - number=3, - message=io.GcsDestination, + proto.MESSAGE, number=3, message=io.GcsDestination, ) image_destination = proto.Field( - proto.MESSAGE, - number=4, - message=io.ContainerRegistryDestination, + proto.MESSAGE, number=4, message=io.ContainerRegistryDestination, ) name = proto.Field(proto.STRING, number=1) - output_config = proto.Field( - proto.MESSAGE, - number=2, - message=OutputConfig, - ) + output_config = proto.Field(proto.MESSAGE, number=2, message=OutputConfig,) class ExportModelOperationMetadata(proto.Message): @@ -322,16 +292,10 @@ class OutputInfo(proto.Message): image_output_uri = proto.Field(proto.STRING, number=3) generic_metadata = proto.Field( - proto.MESSAGE, - number=1, - message=operation.GenericOperationMetadata, + proto.MESSAGE, number=1, message=operation.GenericOperationMetadata, ) - output_info = proto.Field( - proto.MESSAGE, - number=2, - message=OutputInfo, - ) + output_info = proto.Field(proto.MESSAGE, number=2, message=OutputInfo,) class ExportModelResponse(proto.Message): @@ -386,11 +350,7 @@ class ListModelEvaluationsRequest(proto.Message): page_token = proto.Field(proto.STRING, number=4) - read_mask = proto.Field( - proto.MESSAGE, - number=5, - message=field_mask.FieldMask, - ) + read_mask = proto.Field(proto.MESSAGE, number=5, message=field_mask.FieldMask,) class ListModelEvaluationsResponse(proto.Message): @@ -412,9 +372,7 @@ def raw_page(self): return self model_evaluations = proto.RepeatedField( - proto.MESSAGE, - number=1, - message=model_evaluation.ModelEvaluation, + proto.MESSAGE, number=1, message=model_evaluation.ModelEvaluation, ) next_page_token = proto.Field(proto.STRING, number=2) @@ -469,11 +427,7 @@ class ListModelEvaluationSlicesRequest(proto.Message): page_token = proto.Field(proto.STRING, number=4) - read_mask = proto.Field( - proto.MESSAGE, - number=5, - message=field_mask.FieldMask, - ) + read_mask = proto.Field(proto.MESSAGE, number=5, message=field_mask.FieldMask,) class ListModelEvaluationSlicesResponse(proto.Message): @@ -495,9 +449,7 @@ def raw_page(self): return self model_evaluation_slices = proto.RepeatedField( - proto.MESSAGE, - number=1, - message=model_evaluation_slice.ModelEvaluationSlice, + proto.MESSAGE, number=1, message=model_evaluation_slice.ModelEvaluationSlice, ) next_page_token = proto.Field(proto.STRING, number=2) diff --git a/google/cloud/aiplatform_v1beta1/types/operation.py b/google/cloud/aiplatform_v1beta1/types/operation.py index c9d084cbfa..68fb0daead 100644 --- a/google/cloud/aiplatform_v1beta1/types/operation.py +++ b/google/cloud/aiplatform_v1beta1/types/operation.py @@ -24,10 +24,7 @@ __protobuf__ = proto.module( package="google.cloud.aiplatform.v1beta1", - manifest={ - "GenericOperationMetadata", - "DeleteOperationMetadata", - }, + manifest={"GenericOperationMetadata", "DeleteOperationMetadata",}, ) @@ -52,22 +49,12 @@ class GenericOperationMetadata(proto.Message): """ partial_failures = proto.RepeatedField( - proto.MESSAGE, - number=1, - message=status.Status, + 
proto.MESSAGE, number=1, message=status.Status, ) - create_time = proto.Field( - proto.MESSAGE, - number=2, - message=timestamp.Timestamp, - ) + create_time = proto.Field(proto.MESSAGE, number=2, message=timestamp.Timestamp,) - update_time = proto.Field( - proto.MESSAGE, - number=3, - message=timestamp.Timestamp, - ) + update_time = proto.Field(proto.MESSAGE, number=3, message=timestamp.Timestamp,) class DeleteOperationMetadata(proto.Message): @@ -79,9 +66,7 @@ class DeleteOperationMetadata(proto.Message): """ generic_metadata = proto.Field( - proto.MESSAGE, - number=1, - message="GenericOperationMetadata", + proto.MESSAGE, number=1, message="GenericOperationMetadata", ) diff --git a/google/cloud/aiplatform_v1beta1/types/pipeline_service.py b/google/cloud/aiplatform_v1beta1/types/pipeline_service.py index 208ed5006a..9f0856732d 100644 --- a/google/cloud/aiplatform_v1beta1/types/pipeline_service.py +++ b/google/cloud/aiplatform_v1beta1/types/pipeline_service.py @@ -53,9 +53,7 @@ class CreateTrainingPipelineRequest(proto.Message): parent = proto.Field(proto.STRING, number=1) training_pipeline = proto.Field( - proto.MESSAGE, - number=2, - message=gca_training_pipeline.TrainingPipeline, + proto.MESSAGE, number=2, message=gca_training_pipeline.TrainingPipeline, ) @@ -118,11 +116,7 @@ class ListTrainingPipelinesRequest(proto.Message): page_token = proto.Field(proto.STRING, number=4) - read_mask = proto.Field( - proto.MESSAGE, - number=5, - message=field_mask.FieldMask, - ) + read_mask = proto.Field(proto.MESSAGE, number=5, message=field_mask.FieldMask,) class ListTrainingPipelinesResponse(proto.Message): @@ -144,9 +138,7 @@ def raw_page(self): return self training_pipelines = proto.RepeatedField( - proto.MESSAGE, - number=1, - message=gca_training_pipeline.TrainingPipeline, + proto.MESSAGE, number=1, message=gca_training_pipeline.TrainingPipeline, ) next_page_token = proto.Field(proto.STRING, number=2) diff --git a/google/cloud/aiplatform_v1beta1/types/pipeline_state.py b/google/cloud/aiplatform_v1beta1/types/pipeline_state.py index 9c52592838..cede653bd6 100644 --- a/google/cloud/aiplatform_v1beta1/types/pipeline_state.py +++ b/google/cloud/aiplatform_v1beta1/types/pipeline_state.py @@ -19,10 +19,7 @@ __protobuf__ = proto.module( - package="google.cloud.aiplatform.v1beta1", - manifest={ - "PipelineState", - }, + package="google.cloud.aiplatform.v1beta1", manifest={"PipelineState",}, ) diff --git a/google/cloud/aiplatform_v1beta1/types/prediction_service.py b/google/cloud/aiplatform_v1beta1/types/prediction_service.py index 3e5f8d7be8..b000f88bf8 100644 --- a/google/cloud/aiplatform_v1beta1/types/prediction_service.py +++ b/google/cloud/aiplatform_v1beta1/types/prediction_service.py @@ -65,17 +65,9 @@ class PredictRequest(proto.Message): endpoint = proto.Field(proto.STRING, number=1) - instances = proto.RepeatedField( - proto.MESSAGE, - number=2, - message=struct.Value, - ) + instances = proto.RepeatedField(proto.MESSAGE, number=2, message=struct.Value,) - parameters = proto.Field( - proto.MESSAGE, - number=3, - message=struct.Value, - ) + parameters = proto.Field(proto.MESSAGE, number=3, message=struct.Value,) class PredictResponse(proto.Message): @@ -95,11 +87,7 @@ class PredictResponse(proto.Message): served this prediction. 
""" - predictions = proto.RepeatedField( - proto.MESSAGE, - number=1, - message=struct.Value, - ) + predictions = proto.RepeatedField(proto.MESSAGE, number=1, message=struct.Value,) deployed_model_id = proto.Field(proto.STRING, number=2) @@ -140,17 +128,9 @@ class ExplainRequest(proto.Message): endpoint = proto.Field(proto.STRING, number=1) - instances = proto.RepeatedField( - proto.MESSAGE, - number=2, - message=struct.Value, - ) + instances = proto.RepeatedField(proto.MESSAGE, number=2, message=struct.Value,) - parameters = proto.Field( - proto.MESSAGE, - number=4, - message=struct.Value, - ) + parameters = proto.Field(proto.MESSAGE, number=4, message=struct.Value,) deployed_model_id = proto.Field(proto.STRING, number=3) @@ -177,18 +157,12 @@ class ExplainResponse(proto.Message): """ explanations = proto.RepeatedField( - proto.MESSAGE, - number=1, - message=explanation.Explanation, + proto.MESSAGE, number=1, message=explanation.Explanation, ) deployed_model_id = proto.Field(proto.STRING, number=2) - predictions = proto.RepeatedField( - proto.MESSAGE, - number=3, - message=struct.Value, - ) + predictions = proto.RepeatedField(proto.MESSAGE, number=3, message=struct.Value,) __all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/google/cloud/aiplatform_v1beta1/types/specialist_pool.py b/google/cloud/aiplatform_v1beta1/types/specialist_pool.py index 9b23b5c3c1..4ac8c6a709 100644 --- a/google/cloud/aiplatform_v1beta1/types/specialist_pool.py +++ b/google/cloud/aiplatform_v1beta1/types/specialist_pool.py @@ -19,10 +19,7 @@ __protobuf__ = proto.module( - package="google.cloud.aiplatform.v1beta1", - manifest={ - "SpecialistPool", - }, + package="google.cloud.aiplatform.v1beta1", manifest={"SpecialistPool",}, ) diff --git a/google/cloud/aiplatform_v1beta1/types/specialist_pool_service.py b/google/cloud/aiplatform_v1beta1/types/specialist_pool_service.py index 811ac554ce..724f7165a6 100644 --- a/google/cloud/aiplatform_v1beta1/types/specialist_pool_service.py +++ b/google/cloud/aiplatform_v1beta1/types/specialist_pool_service.py @@ -54,9 +54,7 @@ class CreateSpecialistPoolRequest(proto.Message): parent = proto.Field(proto.STRING, number=1) specialist_pool = proto.Field( - proto.MESSAGE, - number=2, - message=gca_specialist_pool.SpecialistPool, + proto.MESSAGE, number=2, message=gca_specialist_pool.SpecialistPool, ) @@ -70,9 +68,7 @@ class CreateSpecialistPoolOperationMetadata(proto.Message): """ generic_metadata = proto.Field( - proto.MESSAGE, - number=1, - message=operation.GenericOperationMetadata, + proto.MESSAGE, number=1, message=operation.GenericOperationMetadata, ) @@ -118,11 +114,7 @@ class ListSpecialistPoolsRequest(proto.Message): page_token = proto.Field(proto.STRING, number=3) - read_mask = proto.Field( - proto.MESSAGE, - number=4, - message=field_mask.FieldMask, - ) + read_mask = proto.Field(proto.MESSAGE, number=4, message=field_mask.FieldMask,) class ListSpecialistPoolsResponse(proto.Message): @@ -142,9 +134,7 @@ def raw_page(self): return self specialist_pools = proto.RepeatedField( - proto.MESSAGE, - number=1, - message=gca_specialist_pool.SpecialistPool, + proto.MESSAGE, number=1, message=gca_specialist_pool.SpecialistPool, ) next_page_token = proto.Field(proto.STRING, number=2) @@ -185,16 +175,10 @@ class UpdateSpecialistPoolRequest(proto.Message): """ specialist_pool = proto.Field( - proto.MESSAGE, - number=1, - message=gca_specialist_pool.SpecialistPool, + proto.MESSAGE, number=1, message=gca_specialist_pool.SpecialistPool, ) - update_mask = proto.Field( - proto.MESSAGE, 
- number=2, - message=field_mask.FieldMask, - ) + update_mask = proto.Field(proto.MESSAGE, number=2, message=field_mask.FieldMask,) class UpdateSpecialistPoolOperationMetadata(proto.Message): @@ -214,9 +198,7 @@ class UpdateSpecialistPoolOperationMetadata(proto.Message): specialist_pool = proto.Field(proto.STRING, number=1) generic_metadata = proto.Field( - proto.MESSAGE, - number=2, - message=operation.GenericOperationMetadata, + proto.MESSAGE, number=2, message=operation.GenericOperationMetadata, ) diff --git a/google/cloud/aiplatform_v1beta1/types/study.py b/google/cloud/aiplatform_v1beta1/types/study.py index 06abf97ac1..2d6f4ae8c3 100644 --- a/google/cloud/aiplatform_v1beta1/types/study.py +++ b/google/cloud/aiplatform_v1beta1/types/study.py @@ -24,11 +24,7 @@ __protobuf__ = proto.module( package="google.cloud.aiplatform.v1beta1", - manifest={ - "Trial", - "StudySpec", - "Measurement", - }, + manifest={"Trial", "StudySpec", "Measurement",}, ) @@ -86,43 +82,19 @@ class Parameter(proto.Message): parameter_id = proto.Field(proto.STRING, number=1) - value = proto.Field( - proto.MESSAGE, - number=2, - message=struct.Value, - ) + value = proto.Field(proto.MESSAGE, number=2, message=struct.Value,) id = proto.Field(proto.STRING, number=2) - state = proto.Field( - proto.ENUM, - number=3, - enum=State, - ) - - parameters = proto.RepeatedField( - proto.MESSAGE, - number=4, - message=Parameter, - ) - - final_measurement = proto.Field( - proto.MESSAGE, - number=5, - message="Measurement", - ) - - start_time = proto.Field( - proto.MESSAGE, - number=7, - message=timestamp.Timestamp, - ) - - end_time = proto.Field( - proto.MESSAGE, - number=8, - message=timestamp.Timestamp, - ) + state = proto.Field(proto.ENUM, number=3, enum=State,) + + parameters = proto.RepeatedField(proto.MESSAGE, number=4, message=Parameter,) + + final_measurement = proto.Field(proto.MESSAGE, number=5, message="Measurement",) + + start_time = proto.Field(proto.MESSAGE, number=7, message=timestamp.Timestamp,) + + end_time = proto.Field(proto.MESSAGE, number=8, message=timestamp.Timestamp,) custom_job = proto.Field(proto.STRING, number=11) @@ -166,11 +138,7 @@ class GoalType(proto.Enum): metric_id = proto.Field(proto.STRING, number=1) - goal = proto.Field( - proto.ENUM, - number=2, - enum="StudySpec.MetricSpec.GoalType", - ) + goal = proto.Field(proto.ENUM, number=2, enum="StudySpec.MetricSpec.GoalType",) class ParameterSpec(proto.Message): r"""Represents a single parameter to optimize. 
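Every hunk in these types/ modules is behavior-neutral: black only collapses
proto.Field(...) and proto.RepeatedField(...) calls that fit within its
88-column limit, and the field numbers, proto types, and oneof/message
arguments that define the protobuf wire format are untouched. A minimal,
self-contained sketch of the proto-plus pattern these modules follow (the
"example.v1" package and the toy Trial message are illustrative only, not
taken from this repo):

    import proto

    __protobuf__ = proto.module(
        package="example.v1",
        manifest={"Trial"},
    )


    class Trial(proto.Message):
        """Toy message mirroring the generated style in study.py."""

        class Parameter(proto.Message):
            parameter_id = proto.Field(proto.STRING, number=1)

        id = proto.Field(proto.STRING, number=2)
        # Fits within 88 columns, so this generation of black keeps it on one line:
        parameters = proto.RepeatedField(proto.MESSAGE, number=4, message=Parameter)


    __all__ = tuple(sorted(__protobuf__.manifest))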
@@ -345,9 +313,7 @@ class CategoricalValueCondition(proto.Message): ) parameter_spec = proto.Field( - proto.MESSAGE, - number=1, - message="StudySpec.ParameterSpec", + proto.MESSAGE, number=1, message="StudySpec.ParameterSpec", ) double_value_spec = proto.Field( @@ -381,9 +347,7 @@ class CategoricalValueCondition(proto.Message): parameter_id = proto.Field(proto.STRING, number=1) scale_type = proto.Field( - proto.ENUM, - number=6, - enum="StudySpec.ParameterSpec.ScaleType", + proto.ENUM, number=6, enum="StudySpec.ParameterSpec.ScaleType", ) conditional_parameter_specs = proto.RepeatedField( @@ -392,23 +356,11 @@ class CategoricalValueCondition(proto.Message): message="StudySpec.ParameterSpec.ConditionalParameterSpec", ) - metrics = proto.RepeatedField( - proto.MESSAGE, - number=1, - message=MetricSpec, - ) + metrics = proto.RepeatedField(proto.MESSAGE, number=1, message=MetricSpec,) - parameters = proto.RepeatedField( - proto.MESSAGE, - number=2, - message=ParameterSpec, - ) + parameters = proto.RepeatedField(proto.MESSAGE, number=2, message=ParameterSpec,) - algorithm = proto.Field( - proto.ENUM, - number=3, - enum=Algorithm, - ) + algorithm = proto.Field(proto.ENUM, number=3, enum=Algorithm,) class Measurement(proto.Message): @@ -445,11 +397,7 @@ class Metric(proto.Message): step_count = proto.Field(proto.INT64, number=2) - metrics = proto.RepeatedField( - proto.MESSAGE, - number=3, - message=Metric, - ) + metrics = proto.RepeatedField(proto.MESSAGE, number=3, message=Metric,) __all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/google/cloud/aiplatform_v1beta1/types/training_pipeline.py b/google/cloud/aiplatform_v1beta1/types/training_pipeline.py index 64ef852c5e..f1f0debaf9 100644 --- a/google/cloud/aiplatform_v1beta1/types/training_pipeline.py +++ b/google/cloud/aiplatform_v1beta1/types/training_pipeline.py @@ -146,67 +146,27 @@ class TrainingPipeline(proto.Message): display_name = proto.Field(proto.STRING, number=2) - input_data_config = proto.Field( - proto.MESSAGE, - number=3, - message="InputDataConfig", - ) + input_data_config = proto.Field(proto.MESSAGE, number=3, message="InputDataConfig",) training_task_definition = proto.Field(proto.STRING, number=4) - training_task_inputs = proto.Field( - proto.MESSAGE, - number=5, - message=struct.Value, - ) + training_task_inputs = proto.Field(proto.MESSAGE, number=5, message=struct.Value,) - training_task_metadata = proto.Field( - proto.MESSAGE, - number=6, - message=struct.Value, - ) + training_task_metadata = proto.Field(proto.MESSAGE, number=6, message=struct.Value,) - model_to_upload = proto.Field( - proto.MESSAGE, - number=7, - message=model.Model, - ) + model_to_upload = proto.Field(proto.MESSAGE, number=7, message=model.Model,) - state = proto.Field( - proto.ENUM, - number=9, - enum=pipeline_state.PipelineState, - ) + state = proto.Field(proto.ENUM, number=9, enum=pipeline_state.PipelineState,) - error = proto.Field( - proto.MESSAGE, - number=10, - message=status.Status, - ) + error = proto.Field(proto.MESSAGE, number=10, message=status.Status,) - create_time = proto.Field( - proto.MESSAGE, - number=11, - message=timestamp.Timestamp, - ) + create_time = proto.Field(proto.MESSAGE, number=11, message=timestamp.Timestamp,) - start_time = proto.Field( - proto.MESSAGE, - number=12, - message=timestamp.Timestamp, - ) + start_time = proto.Field(proto.MESSAGE, number=12, message=timestamp.Timestamp,) - end_time = proto.Field( - proto.MESSAGE, - number=13, - message=timestamp.Timestamp, - ) + end_time = proto.Field(proto.MESSAGE, 
number=13, message=timestamp.Timestamp,) - update_time = proto.Field( - proto.MESSAGE, - number=14, - message=timestamp.Timestamp, - ) + update_time = proto.Field(proto.MESSAGE, number=14, message=timestamp.Timestamp,) labels = proto.MapField(proto.STRING, proto.STRING, number=15) @@ -327,45 +287,27 @@ class InputDataConfig(proto.Message): """ fraction_split = proto.Field( - proto.MESSAGE, - number=2, - oneof="split", - message="FractionSplit", + proto.MESSAGE, number=2, oneof="split", message="FractionSplit", ) filter_split = proto.Field( - proto.MESSAGE, - number=3, - oneof="split", - message="FilterSplit", + proto.MESSAGE, number=3, oneof="split", message="FilterSplit", ) predefined_split = proto.Field( - proto.MESSAGE, - number=4, - oneof="split", - message="PredefinedSplit", + proto.MESSAGE, number=4, oneof="split", message="PredefinedSplit", ) timestamp_split = proto.Field( - proto.MESSAGE, - number=5, - oneof="split", - message="TimestampSplit", + proto.MESSAGE, number=5, oneof="split", message="TimestampSplit", ) gcs_destination = proto.Field( - proto.MESSAGE, - number=8, - oneof="destination", - message=io.GcsDestination, + proto.MESSAGE, number=8, oneof="destination", message=io.GcsDestination, ) bigquery_destination = proto.Field( - proto.MESSAGE, - number=10, - oneof="destination", - message=io.BigQueryDestination, + proto.MESSAGE, number=10, oneof="destination", message=io.BigQueryDestination, ) dataset_id = proto.Field(proto.STRING, number=1) diff --git a/google/cloud/aiplatform_v1beta1/types/user_action_reference.py b/google/cloud/aiplatform_v1beta1/types/user_action_reference.py index 742ba69127..710e4a6d16 100644 --- a/google/cloud/aiplatform_v1beta1/types/user_action_reference.py +++ b/google/cloud/aiplatform_v1beta1/types/user_action_reference.py @@ -19,10 +19,7 @@ __protobuf__ = proto.module( - package="google.cloud.aiplatform.v1beta1", - manifest={ - "UserActionReference", - }, + package="google.cloud.aiplatform.v1beta1", manifest={"UserActionReference",}, ) diff --git a/noxfile.py b/noxfile.py index 295cac5eb5..87765339b5 100644 --- a/noxfile.py +++ b/noxfile.py @@ -40,9 +40,7 @@ def lint(session): """ session.install("flake8", BLACK_VERSION) session.run( - "black", - "--check", - *BLACK_PATHS, + "black", "--check", *BLACK_PATHS, ) session.run("flake8", "google", "tests") @@ -59,8 +57,7 @@ def blacken(session): """ session.install(BLACK_VERSION) session.run( - "black", - *BLACK_PATHS, + "black", *BLACK_PATHS, ) @@ -76,9 +73,7 @@ def default(session): session.install("asyncmock", "pytest-asyncio") session.install( - "mock", - "pytest", - "pytest-cov", + "mock", "pytest", "pytest-cov", ) session.install("-e", ".") @@ -128,9 +123,7 @@ def system(session): # Install all test dependencies, then install this package into the # virtualenv's dist-packages. session.install( - "mock", - "pytest", - "google-cloud-testutils", + "mock", "pytest", "google-cloud-testutils", ) session.install("-e", ".") diff --git a/tests/unit/gapic/aiplatform_v1beta1/test_dataset_service.py b/tests/unit/gapic/aiplatform_v1beta1/test_dataset_service.py index 411933eca6..51022d9fb7 100644 --- a/tests/unit/gapic/aiplatform_v1beta1/test_dataset_service.py +++ b/tests/unit/gapic/aiplatform_v1beta1/test_dataset_service.py @@ -391,9 +391,7 @@ def test_dataset_service_client_client_options_scopes( client_class, transport_class, transport_name ): # Check the case scopes are provided. 
- options = client_options.ClientOptions( - scopes=["1", "2"], - ) + options = client_options.ClientOptions(scopes=["1", "2"],) with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None client = client_class(client_options=options) @@ -461,8 +459,7 @@ def test_create_dataset( transport: str = "grpc", request_type=dataset_service.CreateDatasetRequest ): client = DatasetServiceClient( - credentials=credentials.AnonymousCredentials(), - transport=transport, + credentials=credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -495,8 +492,7 @@ async def test_create_dataset_async( transport: str = "grpc_asyncio", request_type=dataset_service.CreateDatasetRequest ): client = DatasetServiceAsyncClient( - credentials=credentials.AnonymousCredentials(), - transport=transport, + credentials=credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -528,9 +524,7 @@ async def test_create_dataset_async_from_dict(): def test_create_dataset_field_headers(): - client = DatasetServiceClient( - credentials=credentials.AnonymousCredentials(), - ) + client = DatasetServiceClient(credentials=credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. @@ -550,17 +544,12 @@ def test_create_dataset_field_headers(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "parent=parent/value", - ) in kw["metadata"] + assert ("x-goog-request-params", "parent=parent/value",) in kw["metadata"] @pytest.mark.asyncio async def test_create_dataset_field_headers_async(): - client = DatasetServiceAsyncClient( - credentials=credentials.AnonymousCredentials(), - ) + client = DatasetServiceAsyncClient(credentials=credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. @@ -582,16 +571,11 @@ async def test_create_dataset_field_headers_async(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "parent=parent/value", - ) in kw["metadata"] + assert ("x-goog-request-params", "parent=parent/value",) in kw["metadata"] def test_create_dataset_flattened(): - client = DatasetServiceClient( - credentials=credentials.AnonymousCredentials(), - ) + client = DatasetServiceClient(credentials=credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.create_dataset), "__call__") as call: @@ -601,8 +585,7 @@ def test_create_dataset_flattened(): # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. client.create_dataset( - parent="parent_value", - dataset=gca_dataset.Dataset(name="name_value"), + parent="parent_value", dataset=gca_dataset.Dataset(name="name_value"), ) # Establish that the underlying call was made with the expected @@ -616,9 +599,7 @@ def test_create_dataset_flattened(): def test_create_dataset_flattened_error(): - client = DatasetServiceClient( - credentials=credentials.AnonymousCredentials(), - ) + client = DatasetServiceClient(credentials=credentials.AnonymousCredentials(),) # Attempting to call a method with both a request object and flattened # fields is an error. 
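Every hunk in this patch is the same mechanical black transformation: a call whose arguments fit within black's default 88-character line length is joined onto a single line, existing trailing comma and all, while anything too wide stays exploded with one argument per line. (The pinned BLACK_VERSION is presumably a pre-20.8b0 release; 20.8b0's "magic trailing comma" would instead treat an existing trailing comma as a signal to keep such calls exploded.) A minimal before/after sketch, assuming default settings:

# Hand-wrapped input: the call fits in 88 columns once joined, so black collapses it.
client = DatasetServiceClient(
    credentials=credentials.AnonymousCredentials(),
)

# Blackened output: one line, with the trailing comma carried along.
client = DatasetServiceClient(credentials=credentials.AnonymousCredentials(),)

# Too wide to join, so it keeps one argument per line (field number illustrative):
conditional_parameter_specs = proto.RepeatedField(
    proto.MESSAGE,
    number=10,
    message="StudySpec.ParameterSpec.ConditionalParameterSpec",
)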
@@ -632,9 +613,7 @@ def test_create_dataset_flattened_error(): @pytest.mark.asyncio async def test_create_dataset_flattened_async(): - client = DatasetServiceAsyncClient( - credentials=credentials.AnonymousCredentials(), - ) + client = DatasetServiceAsyncClient(credentials=credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.create_dataset), "__call__") as call: @@ -647,8 +626,7 @@ async def test_create_dataset_flattened_async(): # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. response = await client.create_dataset( - parent="parent_value", - dataset=gca_dataset.Dataset(name="name_value"), + parent="parent_value", dataset=gca_dataset.Dataset(name="name_value"), ) # Establish that the underlying call was made with the expected @@ -663,9 +641,7 @@ async def test_create_dataset_flattened_async(): @pytest.mark.asyncio async def test_create_dataset_flattened_error_async(): - client = DatasetServiceAsyncClient( - credentials=credentials.AnonymousCredentials(), - ) + client = DatasetServiceAsyncClient(credentials=credentials.AnonymousCredentials(),) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -681,8 +657,7 @@ def test_get_dataset( transport: str = "grpc", request_type=dataset_service.GetDatasetRequest ): client = DatasetServiceClient( - credentials=credentials.AnonymousCredentials(), - transport=transport, + credentials=credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -729,8 +704,7 @@ async def test_get_dataset_async( transport: str = "grpc_asyncio", request_type=dataset_service.GetDatasetRequest ): client = DatasetServiceAsyncClient( - credentials=credentials.AnonymousCredentials(), - transport=transport, + credentials=credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -775,9 +749,7 @@ async def test_get_dataset_async_from_dict(): def test_get_dataset_field_headers(): - client = DatasetServiceClient( - credentials=credentials.AnonymousCredentials(), - ) + client = DatasetServiceClient(credentials=credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. @@ -797,17 +769,12 @@ def test_get_dataset_field_headers(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "name=name/value", - ) in kw["metadata"] + assert ("x-goog-request-params", "name=name/value",) in kw["metadata"] @pytest.mark.asyncio async def test_get_dataset_field_headers_async(): - client = DatasetServiceAsyncClient( - credentials=credentials.AnonymousCredentials(), - ) + client = DatasetServiceAsyncClient(credentials=credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. @@ -827,16 +794,11 @@ async def test_get_dataset_field_headers_async(): # Establish that the field header was sent. 
_, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "name=name/value", - ) in kw["metadata"] + assert ("x-goog-request-params", "name=name/value",) in kw["metadata"] def test_get_dataset_flattened(): - client = DatasetServiceClient( - credentials=credentials.AnonymousCredentials(), - ) + client = DatasetServiceClient(credentials=credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.get_dataset), "__call__") as call: @@ -845,9 +807,7 @@ def test_get_dataset_flattened(): # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - client.get_dataset( - name="name_value", - ) + client.get_dataset(name="name_value",) # Establish that the underlying call was made with the expected # request object values. @@ -858,24 +818,19 @@ def test_get_dataset_flattened(): def test_get_dataset_flattened_error(): - client = DatasetServiceClient( - credentials=credentials.AnonymousCredentials(), - ) + client = DatasetServiceClient(credentials=credentials.AnonymousCredentials(),) # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): client.get_dataset( - dataset_service.GetDatasetRequest(), - name="name_value", + dataset_service.GetDatasetRequest(), name="name_value", ) @pytest.mark.asyncio async def test_get_dataset_flattened_async(): - client = DatasetServiceAsyncClient( - credentials=credentials.AnonymousCredentials(), - ) + client = DatasetServiceAsyncClient(credentials=credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.get_dataset), "__call__") as call: @@ -885,9 +840,7 @@ async def test_get_dataset_flattened_async(): call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dataset.Dataset()) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - response = await client.get_dataset( - name="name_value", - ) + response = await client.get_dataset(name="name_value",) # Establish that the underlying call was made with the expected # request object values. @@ -899,16 +852,13 @@ async def test_get_dataset_flattened_async(): @pytest.mark.asyncio async def test_get_dataset_flattened_error_async(): - client = DatasetServiceAsyncClient( - credentials=credentials.AnonymousCredentials(), - ) + client = DatasetServiceAsyncClient(credentials=credentials.AnonymousCredentials(),) # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): await client.get_dataset( - dataset_service.GetDatasetRequest(), - name="name_value", + dataset_service.GetDatasetRequest(), name="name_value", ) @@ -916,8 +866,7 @@ def test_update_dataset( transport: str = "grpc", request_type=dataset_service.UpdateDatasetRequest ): client = DatasetServiceClient( - credentials=credentials.AnonymousCredentials(), - transport=transport, + credentials=credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -964,8 +913,7 @@ async def test_update_dataset_async( transport: str = "grpc_asyncio", request_type=dataset_service.UpdateDatasetRequest ): client = DatasetServiceAsyncClient( - credentials=credentials.AnonymousCredentials(), - transport=transport, + credentials=credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -1010,9 +958,7 @@ async def test_update_dataset_async_from_dict(): def test_update_dataset_field_headers(): - client = DatasetServiceClient( - credentials=credentials.AnonymousCredentials(), - ) + client = DatasetServiceClient(credentials=credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. @@ -1032,17 +978,14 @@ def test_update_dataset_field_headers(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "dataset.name=dataset.name/value", - ) in kw["metadata"] + assert ("x-goog-request-params", "dataset.name=dataset.name/value",) in kw[ + "metadata" + ] @pytest.mark.asyncio async def test_update_dataset_field_headers_async(): - client = DatasetServiceAsyncClient( - credentials=credentials.AnonymousCredentials(), - ) + client = DatasetServiceAsyncClient(credentials=credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. @@ -1062,16 +1005,13 @@ async def test_update_dataset_field_headers_async(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "dataset.name=dataset.name/value", - ) in kw["metadata"] + assert ("x-goog-request-params", "dataset.name=dataset.name/value",) in kw[ + "metadata" + ] def test_update_dataset_flattened(): - client = DatasetServiceClient( - credentials=credentials.AnonymousCredentials(), - ) + client = DatasetServiceClient(credentials=credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.update_dataset), "__call__") as call: @@ -1096,9 +1036,7 @@ def test_update_dataset_flattened(): def test_update_dataset_flattened_error(): - client = DatasetServiceClient( - credentials=credentials.AnonymousCredentials(), - ) + client = DatasetServiceClient(credentials=credentials.AnonymousCredentials(),) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -1112,9 +1050,7 @@ def test_update_dataset_flattened_error(): @pytest.mark.asyncio async def test_update_dataset_flattened_async(): - client = DatasetServiceAsyncClient( - credentials=credentials.AnonymousCredentials(), - ) + client = DatasetServiceAsyncClient(credentials=credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object(type(client.transport.update_dataset), "__call__") as call: @@ -1141,9 +1077,7 @@ async def test_update_dataset_flattened_async(): @pytest.mark.asyncio async def test_update_dataset_flattened_error_async(): - client = DatasetServiceAsyncClient( - credentials=credentials.AnonymousCredentials(), - ) + client = DatasetServiceAsyncClient(credentials=credentials.AnonymousCredentials(),) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -1159,8 +1093,7 @@ def test_list_datasets( transport: str = "grpc", request_type=dataset_service.ListDatasetsRequest ): client = DatasetServiceClient( - credentials=credentials.AnonymousCredentials(), - transport=transport, + credentials=credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -1198,8 +1131,7 @@ async def test_list_datasets_async( transport: str = "grpc_asyncio", request_type=dataset_service.ListDatasetsRequest ): client = DatasetServiceAsyncClient( - credentials=credentials.AnonymousCredentials(), - transport=transport, + credentials=credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -1235,9 +1167,7 @@ async def test_list_datasets_async_from_dict(): def test_list_datasets_field_headers(): - client = DatasetServiceClient( - credentials=credentials.AnonymousCredentials(), - ) + client = DatasetServiceClient(credentials=credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. @@ -1257,17 +1187,12 @@ def test_list_datasets_field_headers(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "parent=parent/value", - ) in kw["metadata"] + assert ("x-goog-request-params", "parent=parent/value",) in kw["metadata"] @pytest.mark.asyncio async def test_list_datasets_field_headers_async(): - client = DatasetServiceAsyncClient( - credentials=credentials.AnonymousCredentials(), - ) + client = DatasetServiceAsyncClient(credentials=credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. @@ -1289,16 +1214,11 @@ async def test_list_datasets_field_headers_async(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "parent=parent/value", - ) in kw["metadata"] + assert ("x-goog-request-params", "parent=parent/value",) in kw["metadata"] def test_list_datasets_flattened(): - client = DatasetServiceClient( - credentials=credentials.AnonymousCredentials(), - ) + client = DatasetServiceClient(credentials=credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.list_datasets), "__call__") as call: @@ -1307,9 +1227,7 @@ def test_list_datasets_flattened(): # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - client.list_datasets( - parent="parent_value", - ) + client.list_datasets(parent="parent_value",) # Establish that the underlying call was made with the expected # request object values. 
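The *_flattened and *_flattened_error hunks above and below all pin down the same generated-client convention: a method accepts either a prebuilt request object or flattened keyword fields, and mixing the two raises ValueError. A condensed sketch of that pattern, assuming the same imports the test module itself uses (mock, pytest, google.auth credentials, and the v1beta1 dataset_service types):

import mock
import pytest
from google.auth import credentials
from google.cloud.aiplatform_v1beta1.services.dataset_service import DatasetServiceClient
from google.cloud.aiplatform_v1beta1.types import dataset, dataset_service


def test_get_dataset_flattened_sketch():
    client = DatasetServiceClient(credentials=credentials.AnonymousCredentials(),)

    # Flattened keyword fields are assembled into the request object by the client.
    with mock.patch.object(type(client.transport.get_dataset), "__call__") as call:
        call.return_value = dataset.Dataset(name="name_value")
        client.get_dataset(name="name_value",)
        _, args, _ = call.mock_calls[0]
        assert args[0] == dataset_service.GetDatasetRequest(name="name_value")

    # Passing both a request object and flattened fields is ambiguous and rejected.
    with pytest.raises(ValueError):
        client.get_dataset(dataset_service.GetDatasetRequest(), name="name_value")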
@@ -1320,24 +1238,19 @@ def test_list_datasets_flattened(): def test_list_datasets_flattened_error(): - client = DatasetServiceClient( - credentials=credentials.AnonymousCredentials(), - ) + client = DatasetServiceClient(credentials=credentials.AnonymousCredentials(),) # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): client.list_datasets( - dataset_service.ListDatasetsRequest(), - parent="parent_value", + dataset_service.ListDatasetsRequest(), parent="parent_value", ) @pytest.mark.asyncio async def test_list_datasets_flattened_async(): - client = DatasetServiceAsyncClient( - credentials=credentials.AnonymousCredentials(), - ) + client = DatasetServiceAsyncClient(credentials=credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.list_datasets), "__call__") as call: @@ -1349,9 +1262,7 @@ async def test_list_datasets_flattened_async(): ) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - response = await client.list_datasets( - parent="parent_value", - ) + response = await client.list_datasets(parent="parent_value",) # Establish that the underlying call was made with the expected # request object values. @@ -1363,51 +1274,33 @@ async def test_list_datasets_flattened_async(): @pytest.mark.asyncio async def test_list_datasets_flattened_error_async(): - client = DatasetServiceAsyncClient( - credentials=credentials.AnonymousCredentials(), - ) + client = DatasetServiceAsyncClient(credentials=credentials.AnonymousCredentials(),) # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): await client.list_datasets( - dataset_service.ListDatasetsRequest(), - parent="parent_value", + dataset_service.ListDatasetsRequest(), parent="parent_value", ) def test_list_datasets_pager(): - client = DatasetServiceClient( - credentials=credentials.AnonymousCredentials, - ) + client = DatasetServiceClient(credentials=credentials.AnonymousCredentials,) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.list_datasets), "__call__") as call: # Set the response to a series of pages. call.side_effect = ( dataset_service.ListDatasetsResponse( - datasets=[ - dataset.Dataset(), - dataset.Dataset(), - dataset.Dataset(), - ], + datasets=[dataset.Dataset(), dataset.Dataset(), dataset.Dataset(),], next_page_token="abc", ), + dataset_service.ListDatasetsResponse(datasets=[], next_page_token="def",), dataset_service.ListDatasetsResponse( - datasets=[], - next_page_token="def", - ), - dataset_service.ListDatasetsResponse( - datasets=[ - dataset.Dataset(), - ], - next_page_token="ghi", + datasets=[dataset.Dataset(),], next_page_token="ghi", ), dataset_service.ListDatasetsResponse( - datasets=[ - dataset.Dataset(), - dataset.Dataset(), - ], + datasets=[dataset.Dataset(), dataset.Dataset(),], ), RuntimeError, ) @@ -1426,37 +1319,22 @@ def test_list_datasets_pager(): def test_list_datasets_pages(): - client = DatasetServiceClient( - credentials=credentials.AnonymousCredentials, - ) + client = DatasetServiceClient(credentials=credentials.AnonymousCredentials,) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.list_datasets), "__call__") as call: # Set the response to a series of pages. 
call.side_effect = ( dataset_service.ListDatasetsResponse( - datasets=[ - dataset.Dataset(), - dataset.Dataset(), - dataset.Dataset(), - ], + datasets=[dataset.Dataset(), dataset.Dataset(), dataset.Dataset(),], next_page_token="abc", ), + dataset_service.ListDatasetsResponse(datasets=[], next_page_token="def",), dataset_service.ListDatasetsResponse( - datasets=[], - next_page_token="def", + datasets=[dataset.Dataset(),], next_page_token="ghi", ), dataset_service.ListDatasetsResponse( - datasets=[ - dataset.Dataset(), - ], - next_page_token="ghi", - ), - dataset_service.ListDatasetsResponse( - datasets=[ - dataset.Dataset(), - dataset.Dataset(), - ], + datasets=[dataset.Dataset(), dataset.Dataset(),], ), RuntimeError, ) @@ -1467,9 +1345,7 @@ def test_list_datasets_pages(): @pytest.mark.asyncio async def test_list_datasets_async_pager(): - client = DatasetServiceAsyncClient( - credentials=credentials.AnonymousCredentials, - ) + client = DatasetServiceAsyncClient(credentials=credentials.AnonymousCredentials,) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -1478,34 +1354,19 @@ async def test_list_datasets_async_pager(): # Set the response to a series of pages. call.side_effect = ( dataset_service.ListDatasetsResponse( - datasets=[ - dataset.Dataset(), - dataset.Dataset(), - dataset.Dataset(), - ], + datasets=[dataset.Dataset(), dataset.Dataset(), dataset.Dataset(),], next_page_token="abc", ), + dataset_service.ListDatasetsResponse(datasets=[], next_page_token="def",), dataset_service.ListDatasetsResponse( - datasets=[], - next_page_token="def", - ), - dataset_service.ListDatasetsResponse( - datasets=[ - dataset.Dataset(), - ], - next_page_token="ghi", + datasets=[dataset.Dataset(),], next_page_token="ghi", ), dataset_service.ListDatasetsResponse( - datasets=[ - dataset.Dataset(), - dataset.Dataset(), - ], + datasets=[dataset.Dataset(), dataset.Dataset(),], ), RuntimeError, ) - async_pager = await client.list_datasets( - request={}, - ) + async_pager = await client.list_datasets(request={},) assert async_pager.next_page_token == "abc" responses = [] async for response in async_pager: @@ -1517,9 +1378,7 @@ async def test_list_datasets_async_pager(): @pytest.mark.asyncio async def test_list_datasets_async_pages(): - client = DatasetServiceAsyncClient( - credentials=credentials.AnonymousCredentials, - ) + client = DatasetServiceAsyncClient(credentials=credentials.AnonymousCredentials,) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -1528,28 +1387,15 @@ async def test_list_datasets_async_pages(): # Set the response to a series of pages. 
call.side_effect = ( dataset_service.ListDatasetsResponse( - datasets=[ - dataset.Dataset(), - dataset.Dataset(), - dataset.Dataset(), - ], + datasets=[dataset.Dataset(), dataset.Dataset(), dataset.Dataset(),], next_page_token="abc", ), + dataset_service.ListDatasetsResponse(datasets=[], next_page_token="def",), dataset_service.ListDatasetsResponse( - datasets=[], - next_page_token="def", + datasets=[dataset.Dataset(),], next_page_token="ghi", ), dataset_service.ListDatasetsResponse( - datasets=[ - dataset.Dataset(), - ], - next_page_token="ghi", - ), - dataset_service.ListDatasetsResponse( - datasets=[ - dataset.Dataset(), - dataset.Dataset(), - ], + datasets=[dataset.Dataset(), dataset.Dataset(),], ), RuntimeError, ) @@ -1564,8 +1410,7 @@ def test_delete_dataset( transport: str = "grpc", request_type=dataset_service.DeleteDatasetRequest ): client = DatasetServiceClient( - credentials=credentials.AnonymousCredentials(), - transport=transport, + credentials=credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -1598,8 +1443,7 @@ async def test_delete_dataset_async( transport: str = "grpc_asyncio", request_type=dataset_service.DeleteDatasetRequest ): client = DatasetServiceAsyncClient( - credentials=credentials.AnonymousCredentials(), - transport=transport, + credentials=credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -1631,9 +1475,7 @@ async def test_delete_dataset_async_from_dict(): def test_delete_dataset_field_headers(): - client = DatasetServiceClient( - credentials=credentials.AnonymousCredentials(), - ) + client = DatasetServiceClient(credentials=credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. @@ -1653,17 +1495,12 @@ def test_delete_dataset_field_headers(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "name=name/value", - ) in kw["metadata"] + assert ("x-goog-request-params", "name=name/value",) in kw["metadata"] @pytest.mark.asyncio async def test_delete_dataset_field_headers_async(): - client = DatasetServiceAsyncClient( - credentials=credentials.AnonymousCredentials(), - ) + client = DatasetServiceAsyncClient(credentials=credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. @@ -1685,16 +1522,11 @@ async def test_delete_dataset_field_headers_async(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "name=name/value", - ) in kw["metadata"] + assert ("x-goog-request-params", "name=name/value",) in kw["metadata"] def test_delete_dataset_flattened(): - client = DatasetServiceClient( - credentials=credentials.AnonymousCredentials(), - ) + client = DatasetServiceClient(credentials=credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.delete_dataset), "__call__") as call: @@ -1703,9 +1535,7 @@ def test_delete_dataset_flattened(): # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. 
- client.delete_dataset( - name="name_value", - ) + client.delete_dataset(name="name_value",) # Establish that the underlying call was made with the expected # request object values. @@ -1716,24 +1546,19 @@ def test_delete_dataset_flattened(): def test_delete_dataset_flattened_error(): - client = DatasetServiceClient( - credentials=credentials.AnonymousCredentials(), - ) + client = DatasetServiceClient(credentials=credentials.AnonymousCredentials(),) # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): client.delete_dataset( - dataset_service.DeleteDatasetRequest(), - name="name_value", + dataset_service.DeleteDatasetRequest(), name="name_value", ) @pytest.mark.asyncio async def test_delete_dataset_flattened_async(): - client = DatasetServiceAsyncClient( - credentials=credentials.AnonymousCredentials(), - ) + client = DatasetServiceAsyncClient(credentials=credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.delete_dataset), "__call__") as call: @@ -1745,9 +1570,7 @@ async def test_delete_dataset_flattened_async(): ) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - response = await client.delete_dataset( - name="name_value", - ) + response = await client.delete_dataset(name="name_value",) # Establish that the underlying call was made with the expected # request object values. @@ -1759,16 +1582,13 @@ async def test_delete_dataset_flattened_async(): @pytest.mark.asyncio async def test_delete_dataset_flattened_error_async(): - client = DatasetServiceAsyncClient( - credentials=credentials.AnonymousCredentials(), - ) + client = DatasetServiceAsyncClient(credentials=credentials.AnonymousCredentials(),) # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): await client.delete_dataset( - dataset_service.DeleteDatasetRequest(), - name="name_value", + dataset_service.DeleteDatasetRequest(), name="name_value", ) @@ -1776,8 +1596,7 @@ def test_import_data( transport: str = "grpc", request_type=dataset_service.ImportDataRequest ): client = DatasetServiceClient( - credentials=credentials.AnonymousCredentials(), - transport=transport, + credentials=credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -1810,8 +1629,7 @@ async def test_import_data_async( transport: str = "grpc_asyncio", request_type=dataset_service.ImportDataRequest ): client = DatasetServiceAsyncClient( - credentials=credentials.AnonymousCredentials(), - transport=transport, + credentials=credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -1843,9 +1661,7 @@ async def test_import_data_async_from_dict(): def test_import_data_field_headers(): - client = DatasetServiceClient( - credentials=credentials.AnonymousCredentials(), - ) + client = DatasetServiceClient(credentials=credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. @@ -1865,17 +1681,12 @@ def test_import_data_field_headers(): # Establish that the field header was sent. 
_, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "name=name/value", - ) in kw["metadata"] + assert ("x-goog-request-params", "name=name/value",) in kw["metadata"] @pytest.mark.asyncio async def test_import_data_field_headers_async(): - client = DatasetServiceAsyncClient( - credentials=credentials.AnonymousCredentials(), - ) + client = DatasetServiceAsyncClient(credentials=credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. @@ -1897,16 +1708,11 @@ async def test_import_data_field_headers_async(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "name=name/value", - ) in kw["metadata"] + assert ("x-goog-request-params", "name=name/value",) in kw["metadata"] def test_import_data_flattened(): - client = DatasetServiceClient( - credentials=credentials.AnonymousCredentials(), - ) + client = DatasetServiceClient(credentials=credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.import_data), "__call__") as call: @@ -1935,9 +1741,7 @@ def test_import_data_flattened(): def test_import_data_flattened_error(): - client = DatasetServiceClient( - credentials=credentials.AnonymousCredentials(), - ) + client = DatasetServiceClient(credentials=credentials.AnonymousCredentials(),) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -1953,9 +1757,7 @@ def test_import_data_flattened_error(): @pytest.mark.asyncio async def test_import_data_flattened_async(): - client = DatasetServiceAsyncClient( - credentials=credentials.AnonymousCredentials(), - ) + client = DatasetServiceAsyncClient(credentials=credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.import_data), "__call__") as call: @@ -1988,9 +1790,7 @@ async def test_import_data_flattened_async(): @pytest.mark.asyncio async def test_import_data_flattened_error_async(): - client = DatasetServiceAsyncClient( - credentials=credentials.AnonymousCredentials(), - ) + client = DatasetServiceAsyncClient(credentials=credentials.AnonymousCredentials(),) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -2008,8 +1808,7 @@ def test_export_data( transport: str = "grpc", request_type=dataset_service.ExportDataRequest ): client = DatasetServiceClient( - credentials=credentials.AnonymousCredentials(), - transport=transport, + credentials=credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -2042,8 +1841,7 @@ async def test_export_data_async( transport: str = "grpc_asyncio", request_type=dataset_service.ExportDataRequest ): client = DatasetServiceAsyncClient( - credentials=credentials.AnonymousCredentials(), - transport=transport, + credentials=credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -2075,9 +1873,7 @@ async def test_export_data_async_from_dict(): def test_export_data_field_headers(): - client = DatasetServiceClient( - credentials=credentials.AnonymousCredentials(), - ) + client = DatasetServiceClient(credentials=credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. 
Set these to a non-empty value. @@ -2097,17 +1893,12 @@ def test_export_data_field_headers(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "name=name/value", - ) in kw["metadata"] + assert ("x-goog-request-params", "name=name/value",) in kw["metadata"] @pytest.mark.asyncio async def test_export_data_field_headers_async(): - client = DatasetServiceAsyncClient( - credentials=credentials.AnonymousCredentials(), - ) + client = DatasetServiceAsyncClient(credentials=credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. @@ -2129,16 +1920,11 @@ async def test_export_data_field_headers_async(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "name=name/value", - ) in kw["metadata"] + assert ("x-goog-request-params", "name=name/value",) in kw["metadata"] def test_export_data_flattened(): - client = DatasetServiceClient( - credentials=credentials.AnonymousCredentials(), - ) + client = DatasetServiceClient(credentials=credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.export_data), "__call__") as call: @@ -2171,9 +1957,7 @@ def test_export_data_flattened(): def test_export_data_flattened_error(): - client = DatasetServiceClient( - credentials=credentials.AnonymousCredentials(), - ) + client = DatasetServiceClient(credentials=credentials.AnonymousCredentials(),) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -2191,9 +1975,7 @@ def test_export_data_flattened_error(): @pytest.mark.asyncio async def test_export_data_flattened_async(): - client = DatasetServiceAsyncClient( - credentials=credentials.AnonymousCredentials(), - ) + client = DatasetServiceAsyncClient(credentials=credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.export_data), "__call__") as call: @@ -2230,9 +2012,7 @@ async def test_export_data_flattened_async(): @pytest.mark.asyncio async def test_export_data_flattened_error_async(): - client = DatasetServiceAsyncClient( - credentials=credentials.AnonymousCredentials(), - ) + client = DatasetServiceAsyncClient(credentials=credentials.AnonymousCredentials(),) # Attempting to call a method with both a request object and flattened # fields is an error. 
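The pager hunks that follow (list_data_items, like list_datasets earlier) only compress the fake page sequences onto fewer lines; the behavior under test is unchanged. As a hedged sketch of what these tests exercise, the generated pager keeps re-calling the transport while next_page_token is non-empty and yields items across page boundaries (page contents illustrative):

import mock
from google.auth import credentials
from google.cloud.aiplatform_v1beta1.services.dataset_service import DatasetServiceClient
from google.cloud.aiplatform_v1beta1.types import data_item, dataset_service


def test_list_data_items_pager_sketch():
    client = DatasetServiceClient(credentials=credentials.AnonymousCredentials(),)

    with mock.patch.object(type(client.transport.list_data_items), "__call__") as call:
        # Three fake pages; the empty next_page_token on the last one ends iteration.
        call.side_effect = (
            dataset_service.ListDataItemsResponse(
                data_items=[data_item.DataItem(), data_item.DataItem()],
                next_page_token="abc",
            ),
            dataset_service.ListDataItemsResponse(data_items=[], next_page_token="def"),
            dataset_service.ListDataItemsResponse(data_items=[data_item.DataItem()]),
        )

        # Iterating the pager flattens the items from all pages into one stream.
        results = list(client.list_data_items(request={}))
        assert len(results) == 3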
@@ -2252,8 +2032,7 @@ def test_list_data_items( transport: str = "grpc", request_type=dataset_service.ListDataItemsRequest ): client = DatasetServiceClient( - credentials=credentials.AnonymousCredentials(), - transport=transport, + credentials=credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -2291,8 +2070,7 @@ async def test_list_data_items_async( transport: str = "grpc_asyncio", request_type=dataset_service.ListDataItemsRequest ): client = DatasetServiceAsyncClient( - credentials=credentials.AnonymousCredentials(), - transport=transport, + credentials=credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -2328,9 +2106,7 @@ async def test_list_data_items_async_from_dict(): def test_list_data_items_field_headers(): - client = DatasetServiceClient( - credentials=credentials.AnonymousCredentials(), - ) + client = DatasetServiceClient(credentials=credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. @@ -2350,17 +2126,12 @@ def test_list_data_items_field_headers(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "parent=parent/value", - ) in kw["metadata"] + assert ("x-goog-request-params", "parent=parent/value",) in kw["metadata"] @pytest.mark.asyncio async def test_list_data_items_field_headers_async(): - client = DatasetServiceAsyncClient( - credentials=credentials.AnonymousCredentials(), - ) + client = DatasetServiceAsyncClient(credentials=credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. @@ -2382,16 +2153,11 @@ async def test_list_data_items_field_headers_async(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "parent=parent/value", - ) in kw["metadata"] + assert ("x-goog-request-params", "parent=parent/value",) in kw["metadata"] def test_list_data_items_flattened(): - client = DatasetServiceClient( - credentials=credentials.AnonymousCredentials(), - ) + client = DatasetServiceClient(credentials=credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.list_data_items), "__call__") as call: @@ -2400,9 +2166,7 @@ def test_list_data_items_flattened(): # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - client.list_data_items( - parent="parent_value", - ) + client.list_data_items(parent="parent_value",) # Establish that the underlying call was made with the expected # request object values. @@ -2413,24 +2177,19 @@ def test_list_data_items_flattened(): def test_list_data_items_flattened_error(): - client = DatasetServiceClient( - credentials=credentials.AnonymousCredentials(), - ) + client = DatasetServiceClient(credentials=credentials.AnonymousCredentials(),) # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): client.list_data_items( - dataset_service.ListDataItemsRequest(), - parent="parent_value", + dataset_service.ListDataItemsRequest(), parent="parent_value", ) @pytest.mark.asyncio async def test_list_data_items_flattened_async(): - client = DatasetServiceAsyncClient( - credentials=credentials.AnonymousCredentials(), - ) + client = DatasetServiceAsyncClient(credentials=credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.list_data_items), "__call__") as call: @@ -2442,9 +2201,7 @@ async def test_list_data_items_flattened_async(): ) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - response = await client.list_data_items( - parent="parent_value", - ) + response = await client.list_data_items(parent="parent_value",) # Establish that the underlying call was made with the expected # request object values. @@ -2456,23 +2213,18 @@ async def test_list_data_items_flattened_async(): @pytest.mark.asyncio async def test_list_data_items_flattened_error_async(): - client = DatasetServiceAsyncClient( - credentials=credentials.AnonymousCredentials(), - ) + client = DatasetServiceAsyncClient(credentials=credentials.AnonymousCredentials(),) # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): await client.list_data_items( - dataset_service.ListDataItemsRequest(), - parent="parent_value", + dataset_service.ListDataItemsRequest(), parent="parent_value", ) def test_list_data_items_pager(): - client = DatasetServiceClient( - credentials=credentials.AnonymousCredentials, - ) + client = DatasetServiceClient(credentials=credentials.AnonymousCredentials,) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.list_data_items), "__call__") as call: @@ -2487,20 +2239,13 @@ def test_list_data_items_pager(): next_page_token="abc", ), dataset_service.ListDataItemsResponse( - data_items=[], - next_page_token="def", + data_items=[], next_page_token="def", ), dataset_service.ListDataItemsResponse( - data_items=[ - data_item.DataItem(), - ], - next_page_token="ghi", + data_items=[data_item.DataItem(),], next_page_token="ghi", ), dataset_service.ListDataItemsResponse( - data_items=[ - data_item.DataItem(), - data_item.DataItem(), - ], + data_items=[data_item.DataItem(), data_item.DataItem(),], ), RuntimeError, ) @@ -2519,9 +2264,7 @@ def test_list_data_items_pager(): def test_list_data_items_pages(): - client = DatasetServiceClient( - credentials=credentials.AnonymousCredentials, - ) + client = DatasetServiceClient(credentials=credentials.AnonymousCredentials,) # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object(type(client.transport.list_data_items), "__call__") as call: @@ -2536,20 +2279,13 @@ def test_list_data_items_pages(): next_page_token="abc", ), dataset_service.ListDataItemsResponse( - data_items=[], - next_page_token="def", + data_items=[], next_page_token="def", ), dataset_service.ListDataItemsResponse( - data_items=[ - data_item.DataItem(), - ], - next_page_token="ghi", + data_items=[data_item.DataItem(),], next_page_token="ghi", ), dataset_service.ListDataItemsResponse( - data_items=[ - data_item.DataItem(), - data_item.DataItem(), - ], + data_items=[data_item.DataItem(), data_item.DataItem(),], ), RuntimeError, ) @@ -2560,9 +2296,7 @@ def test_list_data_items_pages(): @pytest.mark.asyncio async def test_list_data_items_async_pager(): - client = DatasetServiceAsyncClient( - credentials=credentials.AnonymousCredentials, - ) + client = DatasetServiceAsyncClient(credentials=credentials.AnonymousCredentials,) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -2579,26 +2313,17 @@ async def test_list_data_items_async_pager(): next_page_token="abc", ), dataset_service.ListDataItemsResponse( - data_items=[], - next_page_token="def", + data_items=[], next_page_token="def", ), dataset_service.ListDataItemsResponse( - data_items=[ - data_item.DataItem(), - ], - next_page_token="ghi", + data_items=[data_item.DataItem(),], next_page_token="ghi", ), dataset_service.ListDataItemsResponse( - data_items=[ - data_item.DataItem(), - data_item.DataItem(), - ], + data_items=[data_item.DataItem(), data_item.DataItem(),], ), RuntimeError, ) - async_pager = await client.list_data_items( - request={}, - ) + async_pager = await client.list_data_items(request={},) assert async_pager.next_page_token == "abc" responses = [] async for response in async_pager: @@ -2610,9 +2335,7 @@ async def test_list_data_items_async_pager(): @pytest.mark.asyncio async def test_list_data_items_async_pages(): - client = DatasetServiceAsyncClient( - credentials=credentials.AnonymousCredentials, - ) + client = DatasetServiceAsyncClient(credentials=credentials.AnonymousCredentials,) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -2629,20 +2352,13 @@ async def test_list_data_items_async_pages(): next_page_token="abc", ), dataset_service.ListDataItemsResponse( - data_items=[], - next_page_token="def", + data_items=[], next_page_token="def", ), dataset_service.ListDataItemsResponse( - data_items=[ - data_item.DataItem(), - ], - next_page_token="ghi", + data_items=[data_item.DataItem(),], next_page_token="ghi", ), dataset_service.ListDataItemsResponse( - data_items=[ - data_item.DataItem(), - data_item.DataItem(), - ], + data_items=[data_item.DataItem(), data_item.DataItem(),], ), RuntimeError, ) @@ -2657,8 +2373,7 @@ def test_get_annotation_spec( transport: str = "grpc", request_type=dataset_service.GetAnnotationSpecRequest ): client = DatasetServiceClient( - credentials=credentials.AnonymousCredentials(), - transport=transport, + credentials=credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -2671,9 +2386,7 @@ def test_get_annotation_spec( ) as call: # Designate an appropriate return value for the call. 
call.return_value = annotation_spec.AnnotationSpec( - name="name_value", - display_name="display_name_value", - etag="etag_value", + name="name_value", display_name="display_name_value", etag="etag_value", ) response = client.get_annotation_spec(request) @@ -2705,8 +2418,7 @@ async def test_get_annotation_spec_async( request_type=dataset_service.GetAnnotationSpecRequest, ): client = DatasetServiceAsyncClient( - credentials=credentials.AnonymousCredentials(), - transport=transport, + credentials=credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -2720,9 +2432,7 @@ async def test_get_annotation_spec_async( # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( annotation_spec.AnnotationSpec( - name="name_value", - display_name="display_name_value", - etag="etag_value", + name="name_value", display_name="display_name_value", etag="etag_value", ) ) @@ -2750,9 +2460,7 @@ async def test_get_annotation_spec_async_from_dict(): def test_get_annotation_spec_field_headers(): - client = DatasetServiceClient( - credentials=credentials.AnonymousCredentials(), - ) + client = DatasetServiceClient(credentials=credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. @@ -2774,17 +2482,12 @@ def test_get_annotation_spec_field_headers(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "name=name/value", - ) in kw["metadata"] + assert ("x-goog-request-params", "name=name/value",) in kw["metadata"] @pytest.mark.asyncio async def test_get_annotation_spec_field_headers_async(): - client = DatasetServiceAsyncClient( - credentials=credentials.AnonymousCredentials(), - ) + client = DatasetServiceAsyncClient(credentials=credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. @@ -2808,16 +2511,11 @@ async def test_get_annotation_spec_field_headers_async(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "name=name/value", - ) in kw["metadata"] + assert ("x-goog-request-params", "name=name/value",) in kw["metadata"] def test_get_annotation_spec_flattened(): - client = DatasetServiceClient( - credentials=credentials.AnonymousCredentials(), - ) + client = DatasetServiceClient(credentials=credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -2828,9 +2526,7 @@ def test_get_annotation_spec_flattened(): # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - client.get_annotation_spec( - name="name_value", - ) + client.get_annotation_spec(name="name_value",) # Establish that the underlying call was made with the expected # request object values. @@ -2841,24 +2537,19 @@ def test_get_annotation_spec_flattened(): def test_get_annotation_spec_flattened_error(): - client = DatasetServiceClient( - credentials=credentials.AnonymousCredentials(), - ) + client = DatasetServiceClient(credentials=credentials.AnonymousCredentials(),) # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): client.get_annotation_spec( - dataset_service.GetAnnotationSpecRequest(), - name="name_value", + dataset_service.GetAnnotationSpecRequest(), name="name_value", ) @pytest.mark.asyncio async def test_get_annotation_spec_flattened_async(): - client = DatasetServiceAsyncClient( - credentials=credentials.AnonymousCredentials(), - ) + client = DatasetServiceAsyncClient(credentials=credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -2872,9 +2563,7 @@ async def test_get_annotation_spec_flattened_async(): ) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - response = await client.get_annotation_spec( - name="name_value", - ) + response = await client.get_annotation_spec(name="name_value",) # Establish that the underlying call was made with the expected # request object values. @@ -2886,16 +2575,13 @@ async def test_get_annotation_spec_flattened_async(): @pytest.mark.asyncio async def test_get_annotation_spec_flattened_error_async(): - client = DatasetServiceAsyncClient( - credentials=credentials.AnonymousCredentials(), - ) + client = DatasetServiceAsyncClient(credentials=credentials.AnonymousCredentials(),) # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): await client.get_annotation_spec( - dataset_service.GetAnnotationSpecRequest(), - name="name_value", + dataset_service.GetAnnotationSpecRequest(), name="name_value", ) @@ -2903,8 +2589,7 @@ def test_list_annotations( transport: str = "grpc", request_type=dataset_service.ListAnnotationsRequest ): client = DatasetServiceClient( - credentials=credentials.AnonymousCredentials(), - transport=transport, + credentials=credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -2942,8 +2627,7 @@ async def test_list_annotations_async( transport: str = "grpc_asyncio", request_type=dataset_service.ListAnnotationsRequest ): client = DatasetServiceAsyncClient( - credentials=credentials.AnonymousCredentials(), - transport=transport, + credentials=credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -2979,9 +2663,7 @@ async def test_list_annotations_async_from_dict(): def test_list_annotations_field_headers(): - client = DatasetServiceClient( - credentials=credentials.AnonymousCredentials(), - ) + client = DatasetServiceClient(credentials=credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. @@ -3001,17 +2683,12 @@ def test_list_annotations_field_headers(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "parent=parent/value", - ) in kw["metadata"] + assert ("x-goog-request-params", "parent=parent/value",) in kw["metadata"] @pytest.mark.asyncio async def test_list_annotations_field_headers_async(): - client = DatasetServiceAsyncClient( - credentials=credentials.AnonymousCredentials(), - ) + client = DatasetServiceAsyncClient(credentials=credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. @@ -3033,16 +2710,11 @@ async def test_list_annotations_field_headers_async(): # Establish that the field header was sent. 
_, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "parent=parent/value", - ) in kw["metadata"] + assert ("x-goog-request-params", "parent=parent/value",) in kw["metadata"] def test_list_annotations_flattened(): - client = DatasetServiceClient( - credentials=credentials.AnonymousCredentials(), - ) + client = DatasetServiceClient(credentials=credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.list_annotations), "__call__") as call: @@ -3051,9 +2723,7 @@ def test_list_annotations_flattened(): # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - client.list_annotations( - parent="parent_value", - ) + client.list_annotations(parent="parent_value",) # Establish that the underlying call was made with the expected # request object values. @@ -3064,24 +2734,19 @@ def test_list_annotations_flattened(): def test_list_annotations_flattened_error(): - client = DatasetServiceClient( - credentials=credentials.AnonymousCredentials(), - ) + client = DatasetServiceClient(credentials=credentials.AnonymousCredentials(),) # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): client.list_annotations( - dataset_service.ListAnnotationsRequest(), - parent="parent_value", + dataset_service.ListAnnotationsRequest(), parent="parent_value", ) @pytest.mark.asyncio async def test_list_annotations_flattened_async(): - client = DatasetServiceAsyncClient( - credentials=credentials.AnonymousCredentials(), - ) + client = DatasetServiceAsyncClient(credentials=credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.list_annotations), "__call__") as call: @@ -3093,9 +2758,7 @@ async def test_list_annotations_flattened_async(): ) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - response = await client.list_annotations( - parent="parent_value", - ) + response = await client.list_annotations(parent="parent_value",) # Establish that the underlying call was made with the expected # request object values. @@ -3107,23 +2770,18 @@ async def test_list_annotations_flattened_async(): @pytest.mark.asyncio async def test_list_annotations_flattened_error_async(): - client = DatasetServiceAsyncClient( - credentials=credentials.AnonymousCredentials(), - ) + client = DatasetServiceAsyncClient(credentials=credentials.AnonymousCredentials(),) # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): await client.list_annotations( - dataset_service.ListAnnotationsRequest(), - parent="parent_value", + dataset_service.ListAnnotationsRequest(), parent="parent_value", ) def test_list_annotations_pager(): - client = DatasetServiceClient( - credentials=credentials.AnonymousCredentials, - ) + client = DatasetServiceClient(credentials=credentials.AnonymousCredentials,) # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object(type(client.transport.list_annotations), "__call__") as call: @@ -3138,20 +2796,13 @@ def test_list_annotations_pager(): next_page_token="abc", ), dataset_service.ListAnnotationsResponse( - annotations=[], - next_page_token="def", + annotations=[], next_page_token="def", ), dataset_service.ListAnnotationsResponse( - annotations=[ - annotation.Annotation(), - ], - next_page_token="ghi", + annotations=[annotation.Annotation(),], next_page_token="ghi", ), dataset_service.ListAnnotationsResponse( - annotations=[ - annotation.Annotation(), - annotation.Annotation(), - ], + annotations=[annotation.Annotation(), annotation.Annotation(),], ), RuntimeError, ) @@ -3170,9 +2821,7 @@ def test_list_annotations_pager(): def test_list_annotations_pages(): - client = DatasetServiceClient( - credentials=credentials.AnonymousCredentials, - ) + client = DatasetServiceClient(credentials=credentials.AnonymousCredentials,) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.list_annotations), "__call__") as call: @@ -3187,20 +2836,13 @@ def test_list_annotations_pages(): next_page_token="abc", ), dataset_service.ListAnnotationsResponse( - annotations=[], - next_page_token="def", + annotations=[], next_page_token="def", ), dataset_service.ListAnnotationsResponse( - annotations=[ - annotation.Annotation(), - ], - next_page_token="ghi", + annotations=[annotation.Annotation(),], next_page_token="ghi", ), dataset_service.ListAnnotationsResponse( - annotations=[ - annotation.Annotation(), - annotation.Annotation(), - ], + annotations=[annotation.Annotation(), annotation.Annotation(),], ), RuntimeError, ) @@ -3211,9 +2853,7 @@ def test_list_annotations_pages(): @pytest.mark.asyncio async def test_list_annotations_async_pager(): - client = DatasetServiceAsyncClient( - credentials=credentials.AnonymousCredentials, - ) + client = DatasetServiceAsyncClient(credentials=credentials.AnonymousCredentials,) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -3230,26 +2870,17 @@ async def test_list_annotations_async_pager(): next_page_token="abc", ), dataset_service.ListAnnotationsResponse( - annotations=[], - next_page_token="def", + annotations=[], next_page_token="def", ), dataset_service.ListAnnotationsResponse( - annotations=[ - annotation.Annotation(), - ], - next_page_token="ghi", + annotations=[annotation.Annotation(),], next_page_token="ghi", ), dataset_service.ListAnnotationsResponse( - annotations=[ - annotation.Annotation(), - annotation.Annotation(), - ], + annotations=[annotation.Annotation(), annotation.Annotation(),], ), RuntimeError, ) - async_pager = await client.list_annotations( - request={}, - ) + async_pager = await client.list_annotations(request={},) assert async_pager.next_page_token == "abc" responses = [] async for response in async_pager: @@ -3261,9 +2892,7 @@ async def test_list_annotations_async_pager(): @pytest.mark.asyncio async def test_list_annotations_async_pages(): - client = DatasetServiceAsyncClient( - credentials=credentials.AnonymousCredentials, - ) + client = DatasetServiceAsyncClient(credentials=credentials.AnonymousCredentials,) # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object( @@ -3280,20 +2909,13 @@ async def test_list_annotations_async_pages(): next_page_token="abc", ), dataset_service.ListAnnotationsResponse( - annotations=[], - next_page_token="def", + annotations=[], next_page_token="def", ), dataset_service.ListAnnotationsResponse( - annotations=[ - annotation.Annotation(), - ], - next_page_token="ghi", + annotations=[annotation.Annotation(),], next_page_token="ghi", ), dataset_service.ListAnnotationsResponse( - annotations=[ - annotation.Annotation(), - annotation.Annotation(), - ], + annotations=[annotation.Annotation(), annotation.Annotation(),], ), RuntimeError, ) @@ -3311,8 +2933,7 @@ def test_credentials_transport_error(): ) with pytest.raises(ValueError): client = DatasetServiceClient( - credentials=credentials.AnonymousCredentials(), - transport=transport, + credentials=credentials.AnonymousCredentials(), transport=transport, ) # It is an error to provide a credentials file and a transport instance. @@ -3331,8 +2952,7 @@ def test_credentials_transport_error(): ) with pytest.raises(ValueError): client = DatasetServiceClient( - client_options={"scopes": ["1", "2"]}, - transport=transport, + client_options={"scopes": ["1", "2"]}, transport=transport, ) @@ -3377,13 +2997,8 @@ def test_transport_adc(transport_class): def test_transport_grpc_default(): # A client should use the gRPC transport by default. - client = DatasetServiceClient( - credentials=credentials.AnonymousCredentials(), - ) - assert isinstance( - client.transport, - transports.DatasetServiceGrpcTransport, - ) + client = DatasetServiceClient(credentials=credentials.AnonymousCredentials(),) + assert isinstance(client.transport, transports.DatasetServiceGrpcTransport,) def test_dataset_service_base_transport_error(): @@ -3439,8 +3054,7 @@ def test_dataset_service_base_transport_with_credentials_file(): Transport.return_value = None load_creds.return_value = (credentials.AnonymousCredentials(), None) transport = transports.DatasetServiceTransport( - credentials_file="credentials.json", - quota_project_id="octopus", + credentials_file="credentials.json", quota_project_id="octopus", ) load_creds.assert_called_once_with( "credentials.json", @@ -3510,8 +3124,7 @@ def test_dataset_service_grpc_transport_channel(): # Check that channel is used if provided. transport = transports.DatasetServiceGrpcTransport( - host="squid.clam.whelk", - channel=channel, + host="squid.clam.whelk", channel=channel, ) assert transport.grpc_channel == channel assert transport._host == "squid.clam.whelk:443" @@ -3523,8 +3136,7 @@ def test_dataset_service_grpc_asyncio_transport_channel(): # Check that channel is used if provided. transport = transports.DatasetServiceGrpcAsyncIOTransport( - host="squid.clam.whelk", - channel=channel, + host="squid.clam.whelk", channel=channel, ) assert transport.grpc_channel == channel assert transport._host == "squid.clam.whelk:443" @@ -3621,16 +3233,12 @@ def test_dataset_service_transport_channel_mtls_with_adc(transport_class): def test_dataset_service_grpc_lro_client(): client = DatasetServiceClient( - credentials=credentials.AnonymousCredentials(), - transport="grpc", + credentials=credentials.AnonymousCredentials(), transport="grpc", ) transport = client.transport # Ensure that we have a api-core operations client. - assert isinstance( - transport.operations_client, - operations_v1.OperationsClient, - ) + assert isinstance(transport.operations_client, operations_v1.OperationsClient,) # Ensure that subsequent calls to the property send the exact same object. 
assert transport.operations_client is transport.operations_client @@ -3638,16 +3246,12 @@ def test_dataset_service_grpc_lro_client(): def test_dataset_service_grpc_lro_async_client(): client = DatasetServiceAsyncClient( - credentials=credentials.AnonymousCredentials(), - transport="grpc_asyncio", + credentials=credentials.AnonymousCredentials(), transport="grpc_asyncio", ) transport = client.transport # Ensure that we have a api-core operations client. - assert isinstance( - transport.operations_client, - operations_v1.OperationsAsyncClient, - ) + assert isinstance(transport.operations_client, operations_v1.OperationsAsyncClient,) # Ensure that subsequent calls to the property send the exact same object. assert transport.operations_client is transport.operations_client @@ -3727,10 +3331,7 @@ def test_data_item_path(): data_item = "nautilus" expected = "projects/{project}/locations/{location}/datasets/{dataset}/dataItems/{data_item}".format( - project=project, - location=location, - dataset=dataset, - data_item=data_item, + project=project, location=location, dataset=dataset, data_item=data_item, ) actual = DatasetServiceClient.data_item_path(project, location, dataset, data_item) assert expected == actual @@ -3756,9 +3357,7 @@ def test_dataset_path(): dataset = "oyster" expected = "projects/{project}/locations/{location}/datasets/{dataset}".format( - project=project, - location=location, - dataset=dataset, + project=project, location=location, dataset=dataset, ) actual = DatasetServiceClient.dataset_path(project, location, dataset) assert expected == actual @@ -3801,9 +3400,7 @@ def test_parse_common_billing_account_path(): def test_common_folder_path(): folder = "scallop" - expected = "folders/{folder}".format( - folder=folder, - ) + expected = "folders/{folder}".format(folder=folder,) actual = DatasetServiceClient.common_folder_path(folder) assert expected == actual @@ -3822,9 +3419,7 @@ def test_parse_common_folder_path(): def test_common_organization_path(): organization = "squid" - expected = "organizations/{organization}".format( - organization=organization, - ) + expected = "organizations/{organization}".format(organization=organization,) actual = DatasetServiceClient.common_organization_path(organization) assert expected == actual @@ -3843,9 +3438,7 @@ def test_parse_common_organization_path(): def test_common_project_path(): project = "whelk" - expected = "projects/{project}".format( - project=project, - ) + expected = "projects/{project}".format(project=project,) actual = DatasetServiceClient.common_project_path(project) assert expected == actual @@ -3866,8 +3459,7 @@ def test_common_location_path(): location = "nudibranch" expected = "projects/{project}/locations/{location}".format( - project=project, - location=location, + project=project, location=location, ) actual = DatasetServiceClient.common_location_path(project, location) assert expected == actual @@ -3892,8 +3484,7 @@ def test_client_withDEFAULT_CLIENT_INFO(): transports.DatasetServiceTransport, "_prep_wrapped_messages" ) as prep: client = DatasetServiceClient( - credentials=credentials.AnonymousCredentials(), - client_info=client_info, + credentials=credentials.AnonymousCredentials(), client_info=client_info, ) prep.assert_called_once_with(client_info) @@ -3902,7 +3493,6 @@ def test_client_withDEFAULT_CLIENT_INFO(): ) as prep: transport_class = DatasetServiceClient.get_transport_class() transport = transport_class( - credentials=credentials.AnonymousCredentials(), - client_info=client_info, + 
credentials=credentials.AnonymousCredentials(), client_info=client_info, ) prep.assert_called_once_with(client_info) diff --git a/tests/unit/gapic/aiplatform_v1beta1/test_endpoint_service.py b/tests/unit/gapic/aiplatform_v1beta1/test_endpoint_service.py index 8994b2c8be..93c35a7a2a 100644 --- a/tests/unit/gapic/aiplatform_v1beta1/test_endpoint_service.py +++ b/tests/unit/gapic/aiplatform_v1beta1/test_endpoint_service.py @@ -401,9 +401,7 @@ def test_endpoint_service_client_client_options_scopes( client_class, transport_class, transport_name ): # Check the case scopes are provided. - options = client_options.ClientOptions( - scopes=["1", "2"], - ) + options = client_options.ClientOptions(scopes=["1", "2"],) with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None client = client_class(client_options=options) @@ -471,8 +469,7 @@ def test_create_endpoint( transport: str = "grpc", request_type=endpoint_service.CreateEndpointRequest ): client = EndpointServiceClient( - credentials=credentials.AnonymousCredentials(), - transport=transport, + credentials=credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -505,8 +502,7 @@ async def test_create_endpoint_async( transport: str = "grpc_asyncio", request_type=endpoint_service.CreateEndpointRequest ): client = EndpointServiceAsyncClient( - credentials=credentials.AnonymousCredentials(), - transport=transport, + credentials=credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -538,9 +534,7 @@ async def test_create_endpoint_async_from_dict(): def test_create_endpoint_field_headers(): - client = EndpointServiceClient( - credentials=credentials.AnonymousCredentials(), - ) + client = EndpointServiceClient(credentials=credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. @@ -560,17 +554,12 @@ def test_create_endpoint_field_headers(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "parent=parent/value", - ) in kw["metadata"] + assert ("x-goog-request-params", "parent=parent/value",) in kw["metadata"] @pytest.mark.asyncio async def test_create_endpoint_field_headers_async(): - client = EndpointServiceAsyncClient( - credentials=credentials.AnonymousCredentials(), - ) + client = EndpointServiceAsyncClient(credentials=credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. @@ -592,16 +581,11 @@ async def test_create_endpoint_field_headers_async(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "parent=parent/value", - ) in kw["metadata"] + assert ("x-goog-request-params", "parent=parent/value",) in kw["metadata"] def test_create_endpoint_flattened(): - client = EndpointServiceClient( - credentials=credentials.AnonymousCredentials(), - ) + client = EndpointServiceClient(credentials=credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.create_endpoint), "__call__") as call: @@ -611,8 +595,7 @@ def test_create_endpoint_flattened(): # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. 
client.create_endpoint( - parent="parent_value", - endpoint=gca_endpoint.Endpoint(name="name_value"), + parent="parent_value", endpoint=gca_endpoint.Endpoint(name="name_value"), ) # Establish that the underlying call was made with the expected @@ -626,9 +609,7 @@ def test_create_endpoint_flattened(): def test_create_endpoint_flattened_error(): - client = EndpointServiceClient( - credentials=credentials.AnonymousCredentials(), - ) + client = EndpointServiceClient(credentials=credentials.AnonymousCredentials(),) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -642,9 +623,7 @@ def test_create_endpoint_flattened_error(): @pytest.mark.asyncio async def test_create_endpoint_flattened_async(): - client = EndpointServiceAsyncClient( - credentials=credentials.AnonymousCredentials(), - ) + client = EndpointServiceAsyncClient(credentials=credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.create_endpoint), "__call__") as call: @@ -657,8 +636,7 @@ async def test_create_endpoint_flattened_async(): # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. response = await client.create_endpoint( - parent="parent_value", - endpoint=gca_endpoint.Endpoint(name="name_value"), + parent="parent_value", endpoint=gca_endpoint.Endpoint(name="name_value"), ) # Establish that the underlying call was made with the expected @@ -673,9 +651,7 @@ async def test_create_endpoint_flattened_async(): @pytest.mark.asyncio async def test_create_endpoint_flattened_error_async(): - client = EndpointServiceAsyncClient( - credentials=credentials.AnonymousCredentials(), - ) + client = EndpointServiceAsyncClient(credentials=credentials.AnonymousCredentials(),) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -691,8 +667,7 @@ def test_get_endpoint( transport: str = "grpc", request_type=endpoint_service.GetEndpointRequest ): client = EndpointServiceClient( - credentials=credentials.AnonymousCredentials(), - transport=transport, + credentials=credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -739,8 +714,7 @@ async def test_get_endpoint_async( transport: str = "grpc_asyncio", request_type=endpoint_service.GetEndpointRequest ): client = EndpointServiceAsyncClient( - credentials=credentials.AnonymousCredentials(), - transport=transport, + credentials=credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -785,9 +759,7 @@ async def test_get_endpoint_async_from_dict(): def test_get_endpoint_field_headers(): - client = EndpointServiceClient( - credentials=credentials.AnonymousCredentials(), - ) + client = EndpointServiceClient(credentials=credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. @@ -807,17 +779,12 @@ def test_get_endpoint_field_headers(): # Establish that the field header was sent. 
_, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "name=name/value", - ) in kw["metadata"] + assert ("x-goog-request-params", "name=name/value",) in kw["metadata"] @pytest.mark.asyncio async def test_get_endpoint_field_headers_async(): - client = EndpointServiceAsyncClient( - credentials=credentials.AnonymousCredentials(), - ) + client = EndpointServiceAsyncClient(credentials=credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. @@ -837,16 +804,11 @@ async def test_get_endpoint_field_headers_async(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "name=name/value", - ) in kw["metadata"] + assert ("x-goog-request-params", "name=name/value",) in kw["metadata"] def test_get_endpoint_flattened(): - client = EndpointServiceClient( - credentials=credentials.AnonymousCredentials(), - ) + client = EndpointServiceClient(credentials=credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.get_endpoint), "__call__") as call: @@ -855,9 +817,7 @@ def test_get_endpoint_flattened(): # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - client.get_endpoint( - name="name_value", - ) + client.get_endpoint(name="name_value",) # Establish that the underlying call was made with the expected # request object values. @@ -868,24 +828,19 @@ def test_get_endpoint_flattened(): def test_get_endpoint_flattened_error(): - client = EndpointServiceClient( - credentials=credentials.AnonymousCredentials(), - ) + client = EndpointServiceClient(credentials=credentials.AnonymousCredentials(),) # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): client.get_endpoint( - endpoint_service.GetEndpointRequest(), - name="name_value", + endpoint_service.GetEndpointRequest(), name="name_value", ) @pytest.mark.asyncio async def test_get_endpoint_flattened_async(): - client = EndpointServiceAsyncClient( - credentials=credentials.AnonymousCredentials(), - ) + client = EndpointServiceAsyncClient(credentials=credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.get_endpoint), "__call__") as call: @@ -895,9 +850,7 @@ async def test_get_endpoint_flattened_async(): call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(endpoint.Endpoint()) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - response = await client.get_endpoint( - name="name_value", - ) + response = await client.get_endpoint(name="name_value",) # Establish that the underlying call was made with the expected # request object values. @@ -909,16 +862,13 @@ async def test_get_endpoint_flattened_async(): @pytest.mark.asyncio async def test_get_endpoint_flattened_error_async(): - client = EndpointServiceAsyncClient( - credentials=credentials.AnonymousCredentials(), - ) + client = EndpointServiceAsyncClient(credentials=credentials.AnonymousCredentials(),) # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): await client.get_endpoint( - endpoint_service.GetEndpointRequest(), - name="name_value", + endpoint_service.GetEndpointRequest(), name="name_value", ) @@ -926,8 +876,7 @@ def test_list_endpoints( transport: str = "grpc", request_type=endpoint_service.ListEndpointsRequest ): client = EndpointServiceClient( - credentials=credentials.AnonymousCredentials(), - transport=transport, + credentials=credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -965,8 +914,7 @@ async def test_list_endpoints_async( transport: str = "grpc_asyncio", request_type=endpoint_service.ListEndpointsRequest ): client = EndpointServiceAsyncClient( - credentials=credentials.AnonymousCredentials(), - transport=transport, + credentials=credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -1002,9 +950,7 @@ async def test_list_endpoints_async_from_dict(): def test_list_endpoints_field_headers(): - client = EndpointServiceClient( - credentials=credentials.AnonymousCredentials(), - ) + client = EndpointServiceClient(credentials=credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. @@ -1024,17 +970,12 @@ def test_list_endpoints_field_headers(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "parent=parent/value", - ) in kw["metadata"] + assert ("x-goog-request-params", "parent=parent/value",) in kw["metadata"] @pytest.mark.asyncio async def test_list_endpoints_field_headers_async(): - client = EndpointServiceAsyncClient( - credentials=credentials.AnonymousCredentials(), - ) + client = EndpointServiceAsyncClient(credentials=credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. @@ -1056,16 +997,11 @@ async def test_list_endpoints_field_headers_async(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "parent=parent/value", - ) in kw["metadata"] + assert ("x-goog-request-params", "parent=parent/value",) in kw["metadata"] def test_list_endpoints_flattened(): - client = EndpointServiceClient( - credentials=credentials.AnonymousCredentials(), - ) + client = EndpointServiceClient(credentials=credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.list_endpoints), "__call__") as call: @@ -1074,9 +1010,7 @@ def test_list_endpoints_flattened(): # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - client.list_endpoints( - parent="parent_value", - ) + client.list_endpoints(parent="parent_value",) # Establish that the underlying call was made with the expected # request object values. @@ -1087,24 +1021,19 @@ def test_list_endpoints_flattened(): def test_list_endpoints_flattened_error(): - client = EndpointServiceClient( - credentials=credentials.AnonymousCredentials(), - ) + client = EndpointServiceClient(credentials=credentials.AnonymousCredentials(),) # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): client.list_endpoints( - endpoint_service.ListEndpointsRequest(), - parent="parent_value", + endpoint_service.ListEndpointsRequest(), parent="parent_value", ) @pytest.mark.asyncio async def test_list_endpoints_flattened_async(): - client = EndpointServiceAsyncClient( - credentials=credentials.AnonymousCredentials(), - ) + client = EndpointServiceAsyncClient(credentials=credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.list_endpoints), "__call__") as call: @@ -1116,9 +1045,7 @@ async def test_list_endpoints_flattened_async(): ) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - response = await client.list_endpoints( - parent="parent_value", - ) + response = await client.list_endpoints(parent="parent_value",) # Establish that the underlying call was made with the expected # request object values. @@ -1130,23 +1057,18 @@ async def test_list_endpoints_flattened_async(): @pytest.mark.asyncio async def test_list_endpoints_flattened_error_async(): - client = EndpointServiceAsyncClient( - credentials=credentials.AnonymousCredentials(), - ) + client = EndpointServiceAsyncClient(credentials=credentials.AnonymousCredentials(),) # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): await client.list_endpoints( - endpoint_service.ListEndpointsRequest(), - parent="parent_value", + endpoint_service.ListEndpointsRequest(), parent="parent_value", ) def test_list_endpoints_pager(): - client = EndpointServiceClient( - credentials=credentials.AnonymousCredentials, - ) + client = EndpointServiceClient(credentials=credentials.AnonymousCredentials,) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.list_endpoints), "__call__") as call: @@ -1161,20 +1083,13 @@ def test_list_endpoints_pager(): next_page_token="abc", ), endpoint_service.ListEndpointsResponse( - endpoints=[], - next_page_token="def", + endpoints=[], next_page_token="def", ), endpoint_service.ListEndpointsResponse( - endpoints=[ - endpoint.Endpoint(), - ], - next_page_token="ghi", + endpoints=[endpoint.Endpoint(),], next_page_token="ghi", ), endpoint_service.ListEndpointsResponse( - endpoints=[ - endpoint.Endpoint(), - endpoint.Endpoint(), - ], + endpoints=[endpoint.Endpoint(), endpoint.Endpoint(),], ), RuntimeError, ) @@ -1193,9 +1108,7 @@ def test_list_endpoints_pager(): def test_list_endpoints_pages(): - client = EndpointServiceClient( - credentials=credentials.AnonymousCredentials, - ) + client = EndpointServiceClient(credentials=credentials.AnonymousCredentials,) # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object(type(client.transport.list_endpoints), "__call__") as call: @@ -1210,20 +1123,13 @@ def test_list_endpoints_pages(): next_page_token="abc", ), endpoint_service.ListEndpointsResponse( - endpoints=[], - next_page_token="def", + endpoints=[], next_page_token="def", ), endpoint_service.ListEndpointsResponse( - endpoints=[ - endpoint.Endpoint(), - ], - next_page_token="ghi", + endpoints=[endpoint.Endpoint(),], next_page_token="ghi", ), endpoint_service.ListEndpointsResponse( - endpoints=[ - endpoint.Endpoint(), - endpoint.Endpoint(), - ], + endpoints=[endpoint.Endpoint(), endpoint.Endpoint(),], ), RuntimeError, ) @@ -1234,9 +1140,7 @@ def test_list_endpoints_pages(): @pytest.mark.asyncio async def test_list_endpoints_async_pager(): - client = EndpointServiceAsyncClient( - credentials=credentials.AnonymousCredentials, - ) + client = EndpointServiceAsyncClient(credentials=credentials.AnonymousCredentials,) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -1253,26 +1157,17 @@ async def test_list_endpoints_async_pager(): next_page_token="abc", ), endpoint_service.ListEndpointsResponse( - endpoints=[], - next_page_token="def", + endpoints=[], next_page_token="def", ), endpoint_service.ListEndpointsResponse( - endpoints=[ - endpoint.Endpoint(), - ], - next_page_token="ghi", + endpoints=[endpoint.Endpoint(),], next_page_token="ghi", ), endpoint_service.ListEndpointsResponse( - endpoints=[ - endpoint.Endpoint(), - endpoint.Endpoint(), - ], + endpoints=[endpoint.Endpoint(), endpoint.Endpoint(),], ), RuntimeError, ) - async_pager = await client.list_endpoints( - request={}, - ) + async_pager = await client.list_endpoints(request={},) assert async_pager.next_page_token == "abc" responses = [] async for response in async_pager: @@ -1284,9 +1179,7 @@ async def test_list_endpoints_async_pager(): @pytest.mark.asyncio async def test_list_endpoints_async_pages(): - client = EndpointServiceAsyncClient( - credentials=credentials.AnonymousCredentials, - ) + client = EndpointServiceAsyncClient(credentials=credentials.AnonymousCredentials,) # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object( @@ -1303,20 +1196,13 @@ async def test_list_endpoints_async_pages(): next_page_token="abc", ), endpoint_service.ListEndpointsResponse( - endpoints=[], - next_page_token="def", + endpoints=[], next_page_token="def", ), endpoint_service.ListEndpointsResponse( - endpoints=[ - endpoint.Endpoint(), - ], - next_page_token="ghi", + endpoints=[endpoint.Endpoint(),], next_page_token="ghi", ), endpoint_service.ListEndpointsResponse( - endpoints=[ - endpoint.Endpoint(), - endpoint.Endpoint(), - ], + endpoints=[endpoint.Endpoint(), endpoint.Endpoint(),], ), RuntimeError, ) @@ -1331,8 +1217,7 @@ def test_update_endpoint( transport: str = "grpc", request_type=endpoint_service.UpdateEndpointRequest ): client = EndpointServiceClient( - credentials=credentials.AnonymousCredentials(), - transport=transport, + credentials=credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -1379,8 +1264,7 @@ async def test_update_endpoint_async( transport: str = "grpc_asyncio", request_type=endpoint_service.UpdateEndpointRequest ): client = EndpointServiceAsyncClient( - credentials=credentials.AnonymousCredentials(), - transport=transport, + credentials=credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -1425,9 +1309,7 @@ async def test_update_endpoint_async_from_dict(): def test_update_endpoint_field_headers(): - client = EndpointServiceClient( - credentials=credentials.AnonymousCredentials(), - ) + client = EndpointServiceClient(credentials=credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. @@ -1447,17 +1329,14 @@ def test_update_endpoint_field_headers(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "endpoint.name=endpoint.name/value", - ) in kw["metadata"] + assert ("x-goog-request-params", "endpoint.name=endpoint.name/value",) in kw[ + "metadata" + ] @pytest.mark.asyncio async def test_update_endpoint_field_headers_async(): - client = EndpointServiceAsyncClient( - credentials=credentials.AnonymousCredentials(), - ) + client = EndpointServiceAsyncClient(credentials=credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. @@ -1479,16 +1358,13 @@ async def test_update_endpoint_field_headers_async(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "endpoint.name=endpoint.name/value", - ) in kw["metadata"] + assert ("x-goog-request-params", "endpoint.name=endpoint.name/value",) in kw[ + "metadata" + ] def test_update_endpoint_flattened(): - client = EndpointServiceClient( - credentials=credentials.AnonymousCredentials(), - ) + client = EndpointServiceClient(credentials=credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.update_endpoint), "__call__") as call: @@ -1513,9 +1389,7 @@ def test_update_endpoint_flattened(): def test_update_endpoint_flattened_error(): - client = EndpointServiceClient( - credentials=credentials.AnonymousCredentials(), - ) + client = EndpointServiceClient(credentials=credentials.AnonymousCredentials(),) # Attempting to call a method with both a request object and flattened # fields is an error. 
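Every hunk in this file applies the same mechanical rewrite: black joins any call whose argument list fits within its 88-character line limit onto a single line, preserving the trailing comma. That collapsing behavior suggests the pinned formatter predates the "magic trailing comma" rule introduced in black 20.8b0, which would instead have kept these calls exploded; the exact version pin is an assumption here, since it is not recorded in this patch. A minimal before/after sketch of the transformation, lifted from the client construction that recurs throughout these tests:

# Before blackening: the exploded, one-argument-per-line form.
client = EndpointServiceClient(
    credentials=credentials.AnonymousCredentials(),
)

# After blackening: the call fits in 88 columns, so it is joined onto a
# single line and the trailing comma survives the join.
client = EndpointServiceClient(credentials=credentials.AnonymousCredentials(),)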
@@ -1529,9 +1403,7 @@ def test_update_endpoint_flattened_error(): @pytest.mark.asyncio async def test_update_endpoint_flattened_async(): - client = EndpointServiceAsyncClient( - credentials=credentials.AnonymousCredentials(), - ) + client = EndpointServiceAsyncClient(credentials=credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.update_endpoint), "__call__") as call: @@ -1560,9 +1432,7 @@ async def test_update_endpoint_flattened_async(): @pytest.mark.asyncio async def test_update_endpoint_flattened_error_async(): - client = EndpointServiceAsyncClient( - credentials=credentials.AnonymousCredentials(), - ) + client = EndpointServiceAsyncClient(credentials=credentials.AnonymousCredentials(),) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -1578,8 +1448,7 @@ def test_delete_endpoint( transport: str = "grpc", request_type=endpoint_service.DeleteEndpointRequest ): client = EndpointServiceClient( - credentials=credentials.AnonymousCredentials(), - transport=transport, + credentials=credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -1612,8 +1481,7 @@ async def test_delete_endpoint_async( transport: str = "grpc_asyncio", request_type=endpoint_service.DeleteEndpointRequest ): client = EndpointServiceAsyncClient( - credentials=credentials.AnonymousCredentials(), - transport=transport, + credentials=credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -1645,9 +1513,7 @@ async def test_delete_endpoint_async_from_dict(): def test_delete_endpoint_field_headers(): - client = EndpointServiceClient( - credentials=credentials.AnonymousCredentials(), - ) + client = EndpointServiceClient(credentials=credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. @@ -1667,17 +1533,12 @@ def test_delete_endpoint_field_headers(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "name=name/value", - ) in kw["metadata"] + assert ("x-goog-request-params", "name=name/value",) in kw["metadata"] @pytest.mark.asyncio async def test_delete_endpoint_field_headers_async(): - client = EndpointServiceAsyncClient( - credentials=credentials.AnonymousCredentials(), - ) + client = EndpointServiceAsyncClient(credentials=credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. @@ -1699,16 +1560,11 @@ async def test_delete_endpoint_field_headers_async(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "name=name/value", - ) in kw["metadata"] + assert ("x-goog-request-params", "name=name/value",) in kw["metadata"] def test_delete_endpoint_flattened(): - client = EndpointServiceClient( - credentials=credentials.AnonymousCredentials(), - ) + client = EndpointServiceClient(credentials=credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object(type(client.transport.delete_endpoint), "__call__") as call: @@ -1717,9 +1573,7 @@ def test_delete_endpoint_flattened(): # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - client.delete_endpoint( - name="name_value", - ) + client.delete_endpoint(name="name_value",) # Establish that the underlying call was made with the expected # request object values. @@ -1730,24 +1584,19 @@ def test_delete_endpoint_flattened(): def test_delete_endpoint_flattened_error(): - client = EndpointServiceClient( - credentials=credentials.AnonymousCredentials(), - ) + client = EndpointServiceClient(credentials=credentials.AnonymousCredentials(),) # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): client.delete_endpoint( - endpoint_service.DeleteEndpointRequest(), - name="name_value", + endpoint_service.DeleteEndpointRequest(), name="name_value", ) @pytest.mark.asyncio async def test_delete_endpoint_flattened_async(): - client = EndpointServiceAsyncClient( - credentials=credentials.AnonymousCredentials(), - ) + client = EndpointServiceAsyncClient(credentials=credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.delete_endpoint), "__call__") as call: @@ -1759,9 +1608,7 @@ async def test_delete_endpoint_flattened_async(): ) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - response = await client.delete_endpoint( - name="name_value", - ) + response = await client.delete_endpoint(name="name_value",) # Establish that the underlying call was made with the expected # request object values. @@ -1773,16 +1620,13 @@ async def test_delete_endpoint_flattened_async(): @pytest.mark.asyncio async def test_delete_endpoint_flattened_error_async(): - client = EndpointServiceAsyncClient( - credentials=credentials.AnonymousCredentials(), - ) + client = EndpointServiceAsyncClient(credentials=credentials.AnonymousCredentials(),) # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): await client.delete_endpoint( - endpoint_service.DeleteEndpointRequest(), - name="name_value", + endpoint_service.DeleteEndpointRequest(), name="name_value", ) @@ -1790,8 +1634,7 @@ def test_deploy_model( transport: str = "grpc", request_type=endpoint_service.DeployModelRequest ): client = EndpointServiceClient( - credentials=credentials.AnonymousCredentials(), - transport=transport, + credentials=credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -1824,8 +1667,7 @@ async def test_deploy_model_async( transport: str = "grpc_asyncio", request_type=endpoint_service.DeployModelRequest ): client = EndpointServiceAsyncClient( - credentials=credentials.AnonymousCredentials(), - transport=transport, + credentials=credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -1857,9 +1699,7 @@ async def test_deploy_model_async_from_dict(): def test_deploy_model_field_headers(): - client = EndpointServiceClient( - credentials=credentials.AnonymousCredentials(), - ) + client = EndpointServiceClient(credentials=credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. 
Set these to a non-empty value. @@ -1879,17 +1719,12 @@ def test_deploy_model_field_headers(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "endpoint=endpoint/value", - ) in kw["metadata"] + assert ("x-goog-request-params", "endpoint=endpoint/value",) in kw["metadata"] @pytest.mark.asyncio async def test_deploy_model_field_headers_async(): - client = EndpointServiceAsyncClient( - credentials=credentials.AnonymousCredentials(), - ) + client = EndpointServiceAsyncClient(credentials=credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. @@ -1911,16 +1746,11 @@ async def test_deploy_model_field_headers_async(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "endpoint=endpoint/value", - ) in kw["metadata"] + assert ("x-goog-request-params", "endpoint=endpoint/value",) in kw["metadata"] def test_deploy_model_flattened(): - client = EndpointServiceClient( - credentials=credentials.AnonymousCredentials(), - ) + client = EndpointServiceClient(credentials=credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.deploy_model), "__call__") as call: @@ -1960,9 +1790,7 @@ def test_deploy_model_flattened(): def test_deploy_model_flattened_error(): - client = EndpointServiceClient( - credentials=credentials.AnonymousCredentials(), - ) + client = EndpointServiceClient(credentials=credentials.AnonymousCredentials(),) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -1983,9 +1811,7 @@ def test_deploy_model_flattened_error(): @pytest.mark.asyncio async def test_deploy_model_flattened_async(): - client = EndpointServiceAsyncClient( - credentials=credentials.AnonymousCredentials(), - ) + client = EndpointServiceAsyncClient(credentials=credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.deploy_model), "__call__") as call: @@ -2029,9 +1855,7 @@ async def test_deploy_model_flattened_async(): @pytest.mark.asyncio async def test_deploy_model_flattened_error_async(): - client = EndpointServiceAsyncClient( - credentials=credentials.AnonymousCredentials(), - ) + client = EndpointServiceAsyncClient(credentials=credentials.AnonymousCredentials(),) # Attempting to call a method with both a request object and flattened # fields is an error. 
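One corner case of that join is visible in the update_endpoint hunks above: when the single-line form of a header assertion would exceed 88 columns, black keeps the tuple intact and wraps the trailing subscript instead, producing assert (...) in kw[ "metadata" ]. The assertion is semantically unchanged either way. A sketch of what it verifies, reusing the names from the mocked-call context in these tests:

# The generated clients attach request routing information to each RPC as
# gRPC metadata; the tests recover the keyword arguments of the mocked stub
# call and look for the x-goog-request-params header among them.
_, _, kw = call.mock_calls[0]
assert ("x-goog-request-params", "endpoint=endpoint/value") in kw["metadata"]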
@@ -2054,8 +1878,7 @@ def test_undeploy_model( transport: str = "grpc", request_type=endpoint_service.UndeployModelRequest ): client = EndpointServiceClient( - credentials=credentials.AnonymousCredentials(), - transport=transport, + credentials=credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -2088,8 +1911,7 @@ async def test_undeploy_model_async( transport: str = "grpc_asyncio", request_type=endpoint_service.UndeployModelRequest ): client = EndpointServiceAsyncClient( - credentials=credentials.AnonymousCredentials(), - transport=transport, + credentials=credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -2121,9 +1943,7 @@ async def test_undeploy_model_async_from_dict(): def test_undeploy_model_field_headers(): - client = EndpointServiceClient( - credentials=credentials.AnonymousCredentials(), - ) + client = EndpointServiceClient(credentials=credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. @@ -2143,17 +1963,12 @@ def test_undeploy_model_field_headers(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "endpoint=endpoint/value", - ) in kw["metadata"] + assert ("x-goog-request-params", "endpoint=endpoint/value",) in kw["metadata"] @pytest.mark.asyncio async def test_undeploy_model_field_headers_async(): - client = EndpointServiceAsyncClient( - credentials=credentials.AnonymousCredentials(), - ) + client = EndpointServiceAsyncClient(credentials=credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. @@ -2175,16 +1990,11 @@ async def test_undeploy_model_field_headers_async(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "endpoint=endpoint/value", - ) in kw["metadata"] + assert ("x-goog-request-params", "endpoint=endpoint/value",) in kw["metadata"] def test_undeploy_model_flattened(): - client = EndpointServiceClient( - credentials=credentials.AnonymousCredentials(), - ) + client = EndpointServiceClient(credentials=credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.undeploy_model), "__call__") as call: @@ -2212,9 +2022,7 @@ def test_undeploy_model_flattened(): def test_undeploy_model_flattened_error(): - client = EndpointServiceClient( - credentials=credentials.AnonymousCredentials(), - ) + client = EndpointServiceClient(credentials=credentials.AnonymousCredentials(),) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -2229,9 +2037,7 @@ def test_undeploy_model_flattened_error(): @pytest.mark.asyncio async def test_undeploy_model_flattened_async(): - client = EndpointServiceAsyncClient( - credentials=credentials.AnonymousCredentials(), - ) + client = EndpointServiceAsyncClient(credentials=credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object(type(client.transport.undeploy_model), "__call__") as call: @@ -2263,9 +2069,7 @@ async def test_undeploy_model_flattened_async(): @pytest.mark.asyncio async def test_undeploy_model_flattened_error_async(): - client = EndpointServiceAsyncClient( - credentials=credentials.AnonymousCredentials(), - ) + client = EndpointServiceAsyncClient(credentials=credentials.AnonymousCredentials(),) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -2285,8 +2089,7 @@ def test_credentials_transport_error(): ) with pytest.raises(ValueError): client = EndpointServiceClient( - credentials=credentials.AnonymousCredentials(), - transport=transport, + credentials=credentials.AnonymousCredentials(), transport=transport, ) # It is an error to provide a credentials file and a transport instance. @@ -2305,8 +2108,7 @@ def test_credentials_transport_error(): ) with pytest.raises(ValueError): client = EndpointServiceClient( - client_options={"scopes": ["1", "2"]}, - transport=transport, + client_options={"scopes": ["1", "2"]}, transport=transport, ) @@ -2351,13 +2153,8 @@ def test_transport_adc(transport_class): def test_transport_grpc_default(): # A client should use the gRPC transport by default. - client = EndpointServiceClient( - credentials=credentials.AnonymousCredentials(), - ) - assert isinstance( - client.transport, - transports.EndpointServiceGrpcTransport, - ) + client = EndpointServiceClient(credentials=credentials.AnonymousCredentials(),) + assert isinstance(client.transport, transports.EndpointServiceGrpcTransport,) def test_endpoint_service_base_transport_error(): @@ -2410,8 +2207,7 @@ def test_endpoint_service_base_transport_with_credentials_file(): Transport.return_value = None load_creds.return_value = (credentials.AnonymousCredentials(), None) transport = transports.EndpointServiceTransport( - credentials_file="credentials.json", - quota_project_id="octopus", + credentials_file="credentials.json", quota_project_id="octopus", ) load_creds.assert_called_once_with( "credentials.json", @@ -2481,8 +2277,7 @@ def test_endpoint_service_grpc_transport_channel(): # Check that channel is used if provided. transport = transports.EndpointServiceGrpcTransport( - host="squid.clam.whelk", - channel=channel, + host="squid.clam.whelk", channel=channel, ) assert transport.grpc_channel == channel assert transport._host == "squid.clam.whelk:443" @@ -2494,8 +2289,7 @@ def test_endpoint_service_grpc_asyncio_transport_channel(): # Check that channel is used if provided. transport = transports.EndpointServiceGrpcAsyncIOTransport( - host="squid.clam.whelk", - channel=channel, + host="squid.clam.whelk", channel=channel, ) assert transport.grpc_channel == channel assert transport._host == "squid.clam.whelk:443" @@ -2592,16 +2386,12 @@ def test_endpoint_service_transport_channel_mtls_with_adc(transport_class): def test_endpoint_service_grpc_lro_client(): client = EndpointServiceClient( - credentials=credentials.AnonymousCredentials(), - transport="grpc", + credentials=credentials.AnonymousCredentials(), transport="grpc", ) transport = client.transport # Ensure that we have a api-core operations client. - assert isinstance( - transport.operations_client, - operations_v1.OperationsClient, - ) + assert isinstance(transport.operations_client, operations_v1.OperationsClient,) # Ensure that subsequent calls to the property send the exact same object. 
assert transport.operations_client is transport.operations_client @@ -2609,16 +2399,12 @@ def test_endpoint_service_grpc_lro_client(): def test_endpoint_service_grpc_lro_async_client(): client = EndpointServiceAsyncClient( - credentials=credentials.AnonymousCredentials(), - transport="grpc_asyncio", + credentials=credentials.AnonymousCredentials(), transport="grpc_asyncio", ) transport = client.transport # Ensure that we have a api-core operations client. - assert isinstance( - transport.operations_client, - operations_v1.OperationsAsyncClient, - ) + assert isinstance(transport.operations_client, operations_v1.OperationsAsyncClient,) # Ensure that subsequent calls to the property send the exact same object. assert transport.operations_client is transport.operations_client @@ -2630,9 +2416,7 @@ def test_endpoint_path(): endpoint = "whelk" expected = "projects/{project}/locations/{location}/endpoints/{endpoint}".format( - project=project, - location=location, - endpoint=endpoint, + project=project, location=location, endpoint=endpoint, ) actual = EndpointServiceClient.endpoint_path(project, location, endpoint) assert expected == actual @@ -2657,9 +2441,7 @@ def test_model_path(): model = "winkle" expected = "projects/{project}/locations/{location}/models/{model}".format( - project=project, - location=location, - model=model, + project=project, location=location, model=model, ) actual = EndpointServiceClient.model_path(project, location, model) assert expected == actual @@ -2702,9 +2484,7 @@ def test_parse_common_billing_account_path(): def test_common_folder_path(): folder = "whelk" - expected = "folders/{folder}".format( - folder=folder, - ) + expected = "folders/{folder}".format(folder=folder,) actual = EndpointServiceClient.common_folder_path(folder) assert expected == actual @@ -2723,9 +2503,7 @@ def test_parse_common_folder_path(): def test_common_organization_path(): organization = "oyster" - expected = "organizations/{organization}".format( - organization=organization, - ) + expected = "organizations/{organization}".format(organization=organization,) actual = EndpointServiceClient.common_organization_path(organization) assert expected == actual @@ -2744,9 +2522,7 @@ def test_parse_common_organization_path(): def test_common_project_path(): project = "cuttlefish" - expected = "projects/{project}".format( - project=project, - ) + expected = "projects/{project}".format(project=project,) actual = EndpointServiceClient.common_project_path(project) assert expected == actual @@ -2767,8 +2543,7 @@ def test_common_location_path(): location = "nautilus" expected = "projects/{project}/locations/{location}".format( - project=project, - location=location, + project=project, location=location, ) actual = EndpointServiceClient.common_location_path(project, location) assert expected == actual @@ -2793,8 +2568,7 @@ def test_client_withDEFAULT_CLIENT_INFO(): transports.EndpointServiceTransport, "_prep_wrapped_messages" ) as prep: client = EndpointServiceClient( - credentials=credentials.AnonymousCredentials(), - client_info=client_info, + credentials=credentials.AnonymousCredentials(), client_info=client_info, ) prep.assert_called_once_with(client_info) @@ -2803,7 +2577,6 @@ def test_client_withDEFAULT_CLIENT_INFO(): ) as prep: transport_class = EndpointServiceClient.get_transport_class() transport = transport_class( - credentials=credentials.AnonymousCredentials(), - client_info=client_info, + credentials=credentials.AnonymousCredentials(), client_info=client_info, ) 
prep.assert_called_once_with(client_info) diff --git a/tests/unit/gapic/aiplatform_v1beta1/test_job_service.py b/tests/unit/gapic/aiplatform_v1beta1/test_job_service.py index f99ac1ce5d..f08d84bd2f 100644 --- a/tests/unit/gapic/aiplatform_v1beta1/test_job_service.py +++ b/tests/unit/gapic/aiplatform_v1beta1/test_job_service.py @@ -399,9 +399,7 @@ def test_job_service_client_client_options_scopes( client_class, transport_class, transport_name ): # Check the case scopes are provided. - options = client_options.ClientOptions( - scopes=["1", "2"], - ) + options = client_options.ClientOptions(scopes=["1", "2"],) with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None client = client_class(client_options=options) @@ -467,8 +465,7 @@ def test_create_custom_job( transport: str = "grpc", request_type=job_service.CreateCustomJobRequest ): client = JobServiceClient( - credentials=credentials.AnonymousCredentials(), - transport=transport, + credentials=credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -514,8 +511,7 @@ async def test_create_custom_job_async( transport: str = "grpc_asyncio", request_type=job_service.CreateCustomJobRequest ): client = JobServiceAsyncClient( - credentials=credentials.AnonymousCredentials(), - transport=transport, + credentials=credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -559,9 +555,7 @@ async def test_create_custom_job_async_from_dict(): def test_create_custom_job_field_headers(): - client = JobServiceClient( - credentials=credentials.AnonymousCredentials(), - ) + client = JobServiceClient(credentials=credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. @@ -583,17 +577,12 @@ def test_create_custom_job_field_headers(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "parent=parent/value", - ) in kw["metadata"] + assert ("x-goog-request-params", "parent=parent/value",) in kw["metadata"] @pytest.mark.asyncio async def test_create_custom_job_field_headers_async(): - client = JobServiceAsyncClient( - credentials=credentials.AnonymousCredentials(), - ) + client = JobServiceAsyncClient(credentials=credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. @@ -617,16 +606,11 @@ async def test_create_custom_job_field_headers_async(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "parent=parent/value", - ) in kw["metadata"] + assert ("x-goog-request-params", "parent=parent/value",) in kw["metadata"] def test_create_custom_job_flattened(): - client = JobServiceClient( - credentials=credentials.AnonymousCredentials(), - ) + client = JobServiceClient(credentials=credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -653,9 +637,7 @@ def test_create_custom_job_flattened(): def test_create_custom_job_flattened_error(): - client = JobServiceClient( - credentials=credentials.AnonymousCredentials(), - ) + client = JobServiceClient(credentials=credentials.AnonymousCredentials(),) # Attempting to call a method with both a request object and flattened # fields is an error. 
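The *_flattened and *_flattened_error tests that dominate these files pin down one calling convention of the generated clients: each method accepts either a fully formed request object or individual flattened fields, and supplying both raises ValueError before any RPC is attempted. A self-contained sketch of that contract, assuming the usual GAPIC client behavior these tests exercise:

from unittest import mock

import pytest
from google.auth import credentials
from google.cloud.aiplatform_v1beta1.services.job_service import JobServiceClient
from google.cloud.aiplatform_v1beta1.types import custom_job, job_service

client = JobServiceClient(credentials=credentials.AnonymousCredentials())

# The flattened form and the request-object form are each accepted on their
# own (the transport is mocked, so no network call is made).
with mock.patch.object(type(client.transport.get_custom_job), "__call__") as call:
    call.return_value = custom_job.CustomJob(name="name_value")
    client.get_custom_job(name="name_value")
    client.get_custom_job(request=job_service.GetCustomJobRequest(name="name_value"))

# Mixing a positional request object with flattened keywords is rejected.
with pytest.raises(ValueError):
    client.get_custom_job(job_service.GetCustomJobRequest(), name="name_value")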
@@ -669,9 +651,7 @@ def test_create_custom_job_flattened_error(): @pytest.mark.asyncio async def test_create_custom_job_flattened_async(): - client = JobServiceAsyncClient( - credentials=credentials.AnonymousCredentials(), - ) + client = JobServiceAsyncClient(credentials=credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -702,9 +682,7 @@ async def test_create_custom_job_flattened_async(): @pytest.mark.asyncio async def test_create_custom_job_flattened_error_async(): - client = JobServiceAsyncClient( - credentials=credentials.AnonymousCredentials(), - ) + client = JobServiceAsyncClient(credentials=credentials.AnonymousCredentials(),) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -720,8 +698,7 @@ def test_get_custom_job( transport: str = "grpc", request_type=job_service.GetCustomJobRequest ): client = JobServiceClient( - credentials=credentials.AnonymousCredentials(), - transport=transport, + credentials=credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -765,8 +742,7 @@ async def test_get_custom_job_async( transport: str = "grpc_asyncio", request_type=job_service.GetCustomJobRequest ): client = JobServiceAsyncClient( - credentials=credentials.AnonymousCredentials(), - transport=transport, + credentials=credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -808,9 +784,7 @@ async def test_get_custom_job_async_from_dict(): def test_get_custom_job_field_headers(): - client = JobServiceClient( - credentials=credentials.AnonymousCredentials(), - ) + client = JobServiceClient(credentials=credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. @@ -830,17 +804,12 @@ def test_get_custom_job_field_headers(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "name=name/value", - ) in kw["metadata"] + assert ("x-goog-request-params", "name=name/value",) in kw["metadata"] @pytest.mark.asyncio async def test_get_custom_job_field_headers_async(): - client = JobServiceAsyncClient( - credentials=credentials.AnonymousCredentials(), - ) + client = JobServiceAsyncClient(credentials=credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. @@ -862,16 +831,11 @@ async def test_get_custom_job_field_headers_async(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "name=name/value", - ) in kw["metadata"] + assert ("x-goog-request-params", "name=name/value",) in kw["metadata"] def test_get_custom_job_flattened(): - client = JobServiceClient( - credentials=credentials.AnonymousCredentials(), - ) + client = JobServiceClient(credentials=credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.get_custom_job), "__call__") as call: @@ -880,9 +844,7 @@ def test_get_custom_job_flattened(): # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. 
- client.get_custom_job( - name="name_value", - ) + client.get_custom_job(name="name_value",) # Establish that the underlying call was made with the expected # request object values. @@ -893,24 +855,19 @@ def test_get_custom_job_flattened(): def test_get_custom_job_flattened_error(): - client = JobServiceClient( - credentials=credentials.AnonymousCredentials(), - ) + client = JobServiceClient(credentials=credentials.AnonymousCredentials(),) # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): client.get_custom_job( - job_service.GetCustomJobRequest(), - name="name_value", + job_service.GetCustomJobRequest(), name="name_value", ) @pytest.mark.asyncio async def test_get_custom_job_flattened_async(): - client = JobServiceAsyncClient( - credentials=credentials.AnonymousCredentials(), - ) + client = JobServiceAsyncClient(credentials=credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.get_custom_job), "__call__") as call: @@ -922,9 +879,7 @@ async def test_get_custom_job_flattened_async(): ) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - response = await client.get_custom_job( - name="name_value", - ) + response = await client.get_custom_job(name="name_value",) # Establish that the underlying call was made with the expected # request object values. @@ -936,16 +891,13 @@ async def test_get_custom_job_flattened_async(): @pytest.mark.asyncio async def test_get_custom_job_flattened_error_async(): - client = JobServiceAsyncClient( - credentials=credentials.AnonymousCredentials(), - ) + client = JobServiceAsyncClient(credentials=credentials.AnonymousCredentials(),) # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): await client.get_custom_job( - job_service.GetCustomJobRequest(), - name="name_value", + job_service.GetCustomJobRequest(), name="name_value", ) @@ -953,8 +905,7 @@ def test_list_custom_jobs( transport: str = "grpc", request_type=job_service.ListCustomJobsRequest ): client = JobServiceClient( - credentials=credentials.AnonymousCredentials(), - transport=transport, + credentials=credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -992,8 +943,7 @@ async def test_list_custom_jobs_async( transport: str = "grpc_asyncio", request_type=job_service.ListCustomJobsRequest ): client = JobServiceAsyncClient( - credentials=credentials.AnonymousCredentials(), - transport=transport, + credentials=credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -1004,9 +954,7 @@ async def test_list_custom_jobs_async( with mock.patch.object(type(client.transport.list_custom_jobs), "__call__") as call: # Designate an appropriate return value for the call. 
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - job_service.ListCustomJobsResponse( - next_page_token="next_page_token_value", - ) + job_service.ListCustomJobsResponse(next_page_token="next_page_token_value",) ) response = await client.list_custom_jobs(request) @@ -1029,9 +977,7 @@ async def test_list_custom_jobs_async_from_dict(): def test_list_custom_jobs_field_headers(): - client = JobServiceClient( - credentials=credentials.AnonymousCredentials(), - ) + client = JobServiceClient(credentials=credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. @@ -1051,17 +997,12 @@ def test_list_custom_jobs_field_headers(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "parent=parent/value", - ) in kw["metadata"] + assert ("x-goog-request-params", "parent=parent/value",) in kw["metadata"] @pytest.mark.asyncio async def test_list_custom_jobs_field_headers_async(): - client = JobServiceAsyncClient( - credentials=credentials.AnonymousCredentials(), - ) + client = JobServiceAsyncClient(credentials=credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. @@ -1083,16 +1024,11 @@ async def test_list_custom_jobs_field_headers_async(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "parent=parent/value", - ) in kw["metadata"] + assert ("x-goog-request-params", "parent=parent/value",) in kw["metadata"] def test_list_custom_jobs_flattened(): - client = JobServiceClient( - credentials=credentials.AnonymousCredentials(), - ) + client = JobServiceClient(credentials=credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.list_custom_jobs), "__call__") as call: @@ -1101,9 +1037,7 @@ def test_list_custom_jobs_flattened(): # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - client.list_custom_jobs( - parent="parent_value", - ) + client.list_custom_jobs(parent="parent_value",) # Establish that the underlying call was made with the expected # request object values. @@ -1114,24 +1048,19 @@ def test_list_custom_jobs_flattened(): def test_list_custom_jobs_flattened_error(): - client = JobServiceClient( - credentials=credentials.AnonymousCredentials(), - ) + client = JobServiceClient(credentials=credentials.AnonymousCredentials(),) # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): client.list_custom_jobs( - job_service.ListCustomJobsRequest(), - parent="parent_value", + job_service.ListCustomJobsRequest(), parent="parent_value", ) @pytest.mark.asyncio async def test_list_custom_jobs_flattened_async(): - client = JobServiceAsyncClient( - credentials=credentials.AnonymousCredentials(), - ) + client = JobServiceAsyncClient(credentials=credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.list_custom_jobs), "__call__") as call: @@ -1143,9 +1072,7 @@ async def test_list_custom_jobs_flattened_async(): ) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. 
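The async tests designate canned responses through grpc_helpers_async.FakeUnaryUnaryCall because an async gRPC stub returns a call object that must itself be awaited to yield the response message. A self-contained demonstration (requires google-api-core, from which this test file already imports the helper):

import asyncio

from google.api_core import grpc_helpers_async


async def main():
    # Awaiting the fake yields the wrapped payload, just as awaiting a real
    # grpc.aio unary-unary call yields the deserialized response.
    fake = grpc_helpers_async.FakeUnaryUnaryCall("canned response")
    assert await fake == "canned response"


asyncio.run(main())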
- response = await client.list_custom_jobs( - parent="parent_value", - ) + response = await client.list_custom_jobs(parent="parent_value",) # Establish that the underlying call was made with the expected # request object values. @@ -1157,23 +1084,18 @@ async def test_list_custom_jobs_flattened_async(): @pytest.mark.asyncio async def test_list_custom_jobs_flattened_error_async(): - client = JobServiceAsyncClient( - credentials=credentials.AnonymousCredentials(), - ) + client = JobServiceAsyncClient(credentials=credentials.AnonymousCredentials(),) # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): await client.list_custom_jobs( - job_service.ListCustomJobsRequest(), - parent="parent_value", + job_service.ListCustomJobsRequest(), parent="parent_value", ) def test_list_custom_jobs_pager(): - client = JobServiceClient( - credentials=credentials.AnonymousCredentials, - ) + client = JobServiceClient(credentials=credentials.AnonymousCredentials,) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.list_custom_jobs), "__call__") as call: @@ -1187,21 +1109,12 @@ def test_list_custom_jobs_pager(): ], next_page_token="abc", ), + job_service.ListCustomJobsResponse(custom_jobs=[], next_page_token="def",), job_service.ListCustomJobsResponse( - custom_jobs=[], - next_page_token="def", - ), - job_service.ListCustomJobsResponse( - custom_jobs=[ - custom_job.CustomJob(), - ], - next_page_token="ghi", + custom_jobs=[custom_job.CustomJob(),], next_page_token="ghi", ), job_service.ListCustomJobsResponse( - custom_jobs=[ - custom_job.CustomJob(), - custom_job.CustomJob(), - ], + custom_jobs=[custom_job.CustomJob(), custom_job.CustomJob(),], ), RuntimeError, ) @@ -1220,9 +1133,7 @@ def test_list_custom_jobs_pager(): def test_list_custom_jobs_pages(): - client = JobServiceClient( - credentials=credentials.AnonymousCredentials, - ) + client = JobServiceClient(credentials=credentials.AnonymousCredentials,) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.list_custom_jobs), "__call__") as call: @@ -1236,21 +1147,12 @@ def test_list_custom_jobs_pages(): ], next_page_token="abc", ), + job_service.ListCustomJobsResponse(custom_jobs=[], next_page_token="def",), job_service.ListCustomJobsResponse( - custom_jobs=[], - next_page_token="def", + custom_jobs=[custom_job.CustomJob(),], next_page_token="ghi", ), job_service.ListCustomJobsResponse( - custom_jobs=[ - custom_job.CustomJob(), - ], - next_page_token="ghi", - ), - job_service.ListCustomJobsResponse( - custom_jobs=[ - custom_job.CustomJob(), - custom_job.CustomJob(), - ], + custom_jobs=[custom_job.CustomJob(), custom_job.CustomJob(),], ), RuntimeError, ) @@ -1261,9 +1163,7 @@ def test_list_custom_jobs_pages(): @pytest.mark.asyncio async def test_list_custom_jobs_async_pager(): - client = JobServiceAsyncClient( - credentials=credentials.AnonymousCredentials, - ) + client = JobServiceAsyncClient(credentials=credentials.AnonymousCredentials,) # Mock the actual call within the gRPC stub, and fake the request. 
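The pager tests feed the mocked stub a fixed sequence of pages via side_effect, with a trailing RuntimeError that would only fire if the pager requested one page too many. The contract under test is plain token-based pagination, modeled below with hypothetical Page/iterate names (not the library's API):

from dataclasses import dataclass, field
from typing import List


@dataclass
class Page:
    items: List[str] = field(default_factory=list)
    next_page_token: str = ""


def iterate(fetch):
    # Keep fetching while the server reports another page; an empty
    # next_page_token ends iteration, so the sentinel is never reached.
    token = None
    while True:
        page = fetch(token)
        yield from page.items
        if not page.next_page_token:
            return
        token = page.next_page_token


responses = iter(
    [
        Page(["a", "b", "c"], "abc"),
        Page([], "def"),
        Page(["d"], "ghi"),
        Page(["e", "f"]),
        RuntimeError("pager over-fetched"),  # sentinel, as in the tests
    ]
)


def fetch(_token):
    item = next(responses)
    if isinstance(item, Exception):  # mock.side_effect raises exception items
        raise item
    return item


assert list(iterate(fetch)) == ["a", "b", "c", "d", "e", "f"]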
with mock.patch.object( @@ -1279,27 +1179,16 @@ async def test_list_custom_jobs_async_pager(): ], next_page_token="abc", ), + job_service.ListCustomJobsResponse(custom_jobs=[], next_page_token="def",), job_service.ListCustomJobsResponse( - custom_jobs=[], - next_page_token="def", - ), - job_service.ListCustomJobsResponse( - custom_jobs=[ - custom_job.CustomJob(), - ], - next_page_token="ghi", + custom_jobs=[custom_job.CustomJob(),], next_page_token="ghi", ), job_service.ListCustomJobsResponse( - custom_jobs=[ - custom_job.CustomJob(), - custom_job.CustomJob(), - ], + custom_jobs=[custom_job.CustomJob(), custom_job.CustomJob(),], ), RuntimeError, ) - async_pager = await client.list_custom_jobs( - request={}, - ) + async_pager = await client.list_custom_jobs(request={},) assert async_pager.next_page_token == "abc" responses = [] async for response in async_pager: @@ -1311,9 +1200,7 @@ async def test_list_custom_jobs_async_pager(): @pytest.mark.asyncio async def test_list_custom_jobs_async_pages(): - client = JobServiceAsyncClient( - credentials=credentials.AnonymousCredentials, - ) + client = JobServiceAsyncClient(credentials=credentials.AnonymousCredentials,) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -1329,21 +1216,12 @@ async def test_list_custom_jobs_async_pages(): ], next_page_token="abc", ), + job_service.ListCustomJobsResponse(custom_jobs=[], next_page_token="def",), job_service.ListCustomJobsResponse( - custom_jobs=[], - next_page_token="def", + custom_jobs=[custom_job.CustomJob(),], next_page_token="ghi", ), job_service.ListCustomJobsResponse( - custom_jobs=[ - custom_job.CustomJob(), - ], - next_page_token="ghi", - ), - job_service.ListCustomJobsResponse( - custom_jobs=[ - custom_job.CustomJob(), - custom_job.CustomJob(), - ], + custom_jobs=[custom_job.CustomJob(), custom_job.CustomJob(),], ), RuntimeError, ) @@ -1358,8 +1236,7 @@ def test_delete_custom_job( transport: str = "grpc", request_type=job_service.DeleteCustomJobRequest ): client = JobServiceClient( - credentials=credentials.AnonymousCredentials(), - transport=transport, + credentials=credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -1394,8 +1271,7 @@ async def test_delete_custom_job_async( transport: str = "grpc_asyncio", request_type=job_service.DeleteCustomJobRequest ): client = JobServiceAsyncClient( - credentials=credentials.AnonymousCredentials(), - transport=transport, + credentials=credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -1429,9 +1305,7 @@ async def test_delete_custom_job_async_from_dict(): def test_delete_custom_job_field_headers(): - client = JobServiceClient( - credentials=credentials.AnonymousCredentials(), - ) + client = JobServiceClient(credentials=credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. @@ -1453,17 +1327,12 @@ def test_delete_custom_job_field_headers(): # Establish that the field header was sent. 
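One quirk in every pager test, sync and async: the client is built with credentials=credentials.AnonymousCredentials, the class object itself, whereas all other tests pass an instance. This appears to be a benign generator typo, since the RPC method is mocked and no request is ever issued, so the credentials are never exercised. The distinction in isolation:

from google.auth import credentials

# What the pager tests pass: the class itself.
assert isinstance(credentials.AnonymousCredentials, type)

# What every other test passes, and what the client actually expects:
creds = credentials.AnonymousCredentials()
assert isinstance(creds, credentials.Credentials)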
_, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "name=name/value", - ) in kw["metadata"] + assert ("x-goog-request-params", "name=name/value",) in kw["metadata"] @pytest.mark.asyncio async def test_delete_custom_job_field_headers_async(): - client = JobServiceAsyncClient( - credentials=credentials.AnonymousCredentials(), - ) + client = JobServiceAsyncClient(credentials=credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. @@ -1487,16 +1356,11 @@ async def test_delete_custom_job_field_headers_async(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "name=name/value", - ) in kw["metadata"] + assert ("x-goog-request-params", "name=name/value",) in kw["metadata"] def test_delete_custom_job_flattened(): - client = JobServiceClient( - credentials=credentials.AnonymousCredentials(), - ) + client = JobServiceClient(credentials=credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -1507,9 +1371,7 @@ def test_delete_custom_job_flattened(): # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - client.delete_custom_job( - name="name_value", - ) + client.delete_custom_job(name="name_value",) # Establish that the underlying call was made with the expected # request object values. @@ -1520,24 +1382,19 @@ def test_delete_custom_job_flattened(): def test_delete_custom_job_flattened_error(): - client = JobServiceClient( - credentials=credentials.AnonymousCredentials(), - ) + client = JobServiceClient(credentials=credentials.AnonymousCredentials(),) # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): client.delete_custom_job( - job_service.DeleteCustomJobRequest(), - name="name_value", + job_service.DeleteCustomJobRequest(), name="name_value", ) @pytest.mark.asyncio async def test_delete_custom_job_flattened_async(): - client = JobServiceAsyncClient( - credentials=credentials.AnonymousCredentials(), - ) + client = JobServiceAsyncClient(credentials=credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -1551,9 +1408,7 @@ async def test_delete_custom_job_flattened_async(): ) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - response = await client.delete_custom_job( - name="name_value", - ) + response = await client.delete_custom_job(name="name_value",) # Establish that the underlying call was made with the expected # request object values. @@ -1565,16 +1420,13 @@ async def test_delete_custom_job_flattened_async(): @pytest.mark.asyncio async def test_delete_custom_job_flattened_error_async(): - client = JobServiceAsyncClient( - credentials=credentials.AnonymousCredentials(), - ) + client = JobServiceAsyncClient(credentials=credentials.AnonymousCredentials(),) # Attempting to call a method with both a request object and flattened # fields is an error. 
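The *_field_headers tests reduce to one assertion: the resource name placed in the request must reappear as an x-goog-request-params metadata entry, which lets Google's frontends route the call without parsing the request body. The real clients build the header via google.api_core.gapic_v1.routing_header; the hypothetical helper below only shows the shape being asserted:

def routing_metadata(**params):
    # Hypothetical stand-in: fold field/value pairs into the single
    # x-goog-request-params header the tests look for.
    value = "&".join(f"{key}={val}" for key, val in params.items())
    return ("x-goog-request-params", value)


metadata = [routing_metadata(name="name/value")]
assert ("x-goog-request-params", "name=name/value") in metadata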
with pytest.raises(ValueError): await client.delete_custom_job( - job_service.DeleteCustomJobRequest(), - name="name_value", + job_service.DeleteCustomJobRequest(), name="name_value", ) @@ -1582,8 +1434,7 @@ def test_cancel_custom_job( transport: str = "grpc", request_type=job_service.CancelCustomJobRequest ): client = JobServiceClient( - credentials=credentials.AnonymousCredentials(), - transport=transport, + credentials=credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -1618,8 +1469,7 @@ async def test_cancel_custom_job_async( transport: str = "grpc_asyncio", request_type=job_service.CancelCustomJobRequest ): client = JobServiceAsyncClient( - credentials=credentials.AnonymousCredentials(), - transport=transport, + credentials=credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -1651,9 +1501,7 @@ async def test_cancel_custom_job_async_from_dict(): def test_cancel_custom_job_field_headers(): - client = JobServiceClient( - credentials=credentials.AnonymousCredentials(), - ) + client = JobServiceClient(credentials=credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. @@ -1675,17 +1523,12 @@ def test_cancel_custom_job_field_headers(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "name=name/value", - ) in kw["metadata"] + assert ("x-goog-request-params", "name=name/value",) in kw["metadata"] @pytest.mark.asyncio async def test_cancel_custom_job_field_headers_async(): - client = JobServiceAsyncClient( - credentials=credentials.AnonymousCredentials(), - ) + client = JobServiceAsyncClient(credentials=credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. @@ -1707,16 +1550,11 @@ async def test_cancel_custom_job_field_headers_async(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "name=name/value", - ) in kw["metadata"] + assert ("x-goog-request-params", "name=name/value",) in kw["metadata"] def test_cancel_custom_job_flattened(): - client = JobServiceClient( - credentials=credentials.AnonymousCredentials(), - ) + client = JobServiceClient(credentials=credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -1727,9 +1565,7 @@ def test_cancel_custom_job_flattened(): # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - client.cancel_custom_job( - name="name_value", - ) + client.cancel_custom_job(name="name_value",) # Establish that the underlying call was made with the expected # request object values. @@ -1740,24 +1576,19 @@ def test_cancel_custom_job_flattened(): def test_cancel_custom_job_flattened_error(): - client = JobServiceClient( - credentials=credentials.AnonymousCredentials(), - ) + client = JobServiceClient(credentials=credentials.AnonymousCredentials(),) # Attempting to call a method with both a request object and flattened # fields is an error. 
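Note that these tests patch type(client.transport.<rpc>), "__call__" rather than the attribute itself: each transport RPC is exposed as a callable multicallable object, and Python looks dunder methods up on the type, so patching __call__ there intercepts the invocation while leaving the attribute in place. The same trick on a stand-in object:

from unittest import mock


class Multicallable:
    # Stand-in for a gRPC stub's unary-unary multicallable.
    def __call__(self, request):
        raise AssertionError("unit tests must never reach the network")


class Transport:
    def __init__(self):
        self.cancel_custom_job = Multicallable()


transport = Transport()
with mock.patch.object(type(transport.cancel_custom_job), "__call__") as call:
    call.return_value = None
    transport.cancel_custom_job("request")  # routed to the mock
    call.assert_called_once()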
with pytest.raises(ValueError): client.cancel_custom_job( - job_service.CancelCustomJobRequest(), - name="name_value", + job_service.CancelCustomJobRequest(), name="name_value", ) @pytest.mark.asyncio async def test_cancel_custom_job_flattened_async(): - client = JobServiceAsyncClient( - credentials=credentials.AnonymousCredentials(), - ) + client = JobServiceAsyncClient(credentials=credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -1769,9 +1600,7 @@ async def test_cancel_custom_job_flattened_async(): call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - response = await client.cancel_custom_job( - name="name_value", - ) + response = await client.cancel_custom_job(name="name_value",) # Establish that the underlying call was made with the expected # request object values. @@ -1783,16 +1612,13 @@ async def test_cancel_custom_job_flattened_async(): @pytest.mark.asyncio async def test_cancel_custom_job_flattened_error_async(): - client = JobServiceAsyncClient( - credentials=credentials.AnonymousCredentials(), - ) + client = JobServiceAsyncClient(credentials=credentials.AnonymousCredentials(),) # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): await client.cancel_custom_job( - job_service.CancelCustomJobRequest(), - name="name_value", + job_service.CancelCustomJobRequest(), name="name_value", ) @@ -1800,8 +1626,7 @@ def test_create_data_labeling_job( transport: str = "grpc", request_type=job_service.CreateDataLabelingJobRequest ): client = JobServiceClient( - credentials=credentials.AnonymousCredentials(), - transport=transport, + credentials=credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -1866,8 +1691,7 @@ async def test_create_data_labeling_job_async( request_type=job_service.CreateDataLabelingJobRequest, ): client = JobServiceAsyncClient( - credentials=credentials.AnonymousCredentials(), - transport=transport, + credentials=credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -1929,9 +1753,7 @@ async def test_create_data_labeling_job_async_from_dict(): def test_create_data_labeling_job_field_headers(): - client = JobServiceClient( - credentials=credentials.AnonymousCredentials(), - ) + client = JobServiceClient(credentials=credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. @@ -1953,17 +1775,12 @@ def test_create_data_labeling_job_field_headers(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "parent=parent/value", - ) in kw["metadata"] + assert ("x-goog-request-params", "parent=parent/value",) in kw["metadata"] @pytest.mark.asyncio async def test_create_data_labeling_job_field_headers_async(): - client = JobServiceAsyncClient( - credentials=credentials.AnonymousCredentials(), - ) + client = JobServiceAsyncClient(credentials=credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. 
@@ -1987,16 +1804,11 @@ async def test_create_data_labeling_job_field_headers_async(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "parent=parent/value", - ) in kw["metadata"] + assert ("x-goog-request-params", "parent=parent/value",) in kw["metadata"] def test_create_data_labeling_job_flattened(): - client = JobServiceClient( - credentials=credentials.AnonymousCredentials(), - ) + client = JobServiceClient(credentials=credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -2025,9 +1837,7 @@ def test_create_data_labeling_job_flattened(): def test_create_data_labeling_job_flattened_error(): - client = JobServiceClient( - credentials=credentials.AnonymousCredentials(), - ) + client = JobServiceClient(credentials=credentials.AnonymousCredentials(),) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -2041,9 +1851,7 @@ def test_create_data_labeling_job_flattened_error(): @pytest.mark.asyncio async def test_create_data_labeling_job_flattened_async(): - client = JobServiceAsyncClient( - credentials=credentials.AnonymousCredentials(), - ) + client = JobServiceAsyncClient(credentials=credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -2076,9 +1884,7 @@ async def test_create_data_labeling_job_flattened_async(): @pytest.mark.asyncio async def test_create_data_labeling_job_flattened_error_async(): - client = JobServiceAsyncClient( - credentials=credentials.AnonymousCredentials(), - ) + client = JobServiceAsyncClient(credentials=credentials.AnonymousCredentials(),) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -2094,8 +1900,7 @@ def test_get_data_labeling_job( transport: str = "grpc", request_type=job_service.GetDataLabelingJobRequest ): client = JobServiceClient( - credentials=credentials.AnonymousCredentials(), - transport=transport, + credentials=credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -2159,8 +1964,7 @@ async def test_get_data_labeling_job_async( transport: str = "grpc_asyncio", request_type=job_service.GetDataLabelingJobRequest ): client = JobServiceAsyncClient( - credentials=credentials.AnonymousCredentials(), - transport=transport, + credentials=credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -2222,9 +2026,7 @@ async def test_get_data_labeling_job_async_from_dict(): def test_get_data_labeling_job_field_headers(): - client = JobServiceClient( - credentials=credentials.AnonymousCredentials(), - ) + client = JobServiceClient(credentials=credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. @@ -2246,17 +2048,12 @@ def test_get_data_labeling_job_field_headers(): # Establish that the field header was sent. 
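Each RPC test above takes transport and request_type as defaulted parameters (e.g. test_get_data_labeling_job_async), and the generated *_from_dict variants re-invoke the same body with request_type=dict, since the proto-plus clients accept either a message instance or an equivalent plain dict. A sketch of that reuse, with hypothetical names:

from dataclasses import dataclass


@dataclass
class GetThingRequest:
    name: str = ""


def run_get_thing(request_type=GetThingRequest):
    # The body builds its request through the injected type, so one test
    # exercises both the proto path and the dict path.
    return request_type()


def test_get_thing():
    assert isinstance(run_get_thing(), GetThingRequest)


def test_get_thing_from_dict():
    assert run_get_thing(request_type=dict) == {}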
_, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "name=name/value", - ) in kw["metadata"] + assert ("x-goog-request-params", "name=name/value",) in kw["metadata"] @pytest.mark.asyncio async def test_get_data_labeling_job_field_headers_async(): - client = JobServiceAsyncClient( - credentials=credentials.AnonymousCredentials(), - ) + client = JobServiceAsyncClient(credentials=credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. @@ -2280,16 +2077,11 @@ async def test_get_data_labeling_job_field_headers_async(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "name=name/value", - ) in kw["metadata"] + assert ("x-goog-request-params", "name=name/value",) in kw["metadata"] def test_get_data_labeling_job_flattened(): - client = JobServiceClient( - credentials=credentials.AnonymousCredentials(), - ) + client = JobServiceClient(credentials=credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -2300,9 +2092,7 @@ def test_get_data_labeling_job_flattened(): # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - client.get_data_labeling_job( - name="name_value", - ) + client.get_data_labeling_job(name="name_value",) # Establish that the underlying call was made with the expected # request object values. @@ -2313,24 +2103,19 @@ def test_get_data_labeling_job_flattened(): def test_get_data_labeling_job_flattened_error(): - client = JobServiceClient( - credentials=credentials.AnonymousCredentials(), - ) + client = JobServiceClient(credentials=credentials.AnonymousCredentials(),) # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): client.get_data_labeling_job( - job_service.GetDataLabelingJobRequest(), - name="name_value", + job_service.GetDataLabelingJobRequest(), name="name_value", ) @pytest.mark.asyncio async def test_get_data_labeling_job_flattened_async(): - client = JobServiceAsyncClient( - credentials=credentials.AnonymousCredentials(), - ) + client = JobServiceAsyncClient(credentials=credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -2344,9 +2129,7 @@ async def test_get_data_labeling_job_flattened_async(): ) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - response = await client.get_data_labeling_job( - name="name_value", - ) + response = await client.get_data_labeling_job(name="name_value",) # Establish that the underlying call was made with the expected # request object values. @@ -2358,16 +2141,13 @@ async def test_get_data_labeling_job_flattened_async(): @pytest.mark.asyncio async def test_get_data_labeling_job_flattened_error_async(): - client = JobServiceAsyncClient( - credentials=credentials.AnonymousCredentials(), - ) + client = JobServiceAsyncClient(credentials=credentials.AnonymousCredentials(),) # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): await client.get_data_labeling_job( - job_service.GetDataLabelingJobRequest(), - name="name_value", + job_service.GetDataLabelingJobRequest(), name="name_value", ) @@ -2375,8 +2155,7 @@ def test_list_data_labeling_jobs( transport: str = "grpc", request_type=job_service.ListDataLabelingJobsRequest ): client = JobServiceClient( - credentials=credentials.AnonymousCredentials(), - transport=transport, + credentials=credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -2417,8 +2196,7 @@ async def test_list_data_labeling_jobs_async( request_type=job_service.ListDataLabelingJobsRequest, ): client = JobServiceAsyncClient( - credentials=credentials.AnonymousCredentials(), - transport=transport, + credentials=credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -2456,9 +2234,7 @@ async def test_list_data_labeling_jobs_async_from_dict(): def test_list_data_labeling_jobs_field_headers(): - client = JobServiceClient( - credentials=credentials.AnonymousCredentials(), - ) + client = JobServiceClient(credentials=credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. @@ -2480,17 +2256,12 @@ def test_list_data_labeling_jobs_field_headers(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "parent=parent/value", - ) in kw["metadata"] + assert ("x-goog-request-params", "parent=parent/value",) in kw["metadata"] @pytest.mark.asyncio async def test_list_data_labeling_jobs_field_headers_async(): - client = JobServiceAsyncClient( - credentials=credentials.AnonymousCredentials(), - ) + client = JobServiceAsyncClient(credentials=credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. @@ -2514,16 +2285,11 @@ async def test_list_data_labeling_jobs_field_headers_async(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "parent=parent/value", - ) in kw["metadata"] + assert ("x-goog-request-params", "parent=parent/value",) in kw["metadata"] def test_list_data_labeling_jobs_flattened(): - client = JobServiceClient( - credentials=credentials.AnonymousCredentials(), - ) + client = JobServiceClient(credentials=credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -2534,9 +2300,7 @@ def test_list_data_labeling_jobs_flattened(): # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - client.list_data_labeling_jobs( - parent="parent_value", - ) + client.list_data_labeling_jobs(parent="parent_value",) # Establish that the underlying call was made with the expected # request object values. @@ -2547,24 +2311,19 @@ def test_list_data_labeling_jobs_flattened(): def test_list_data_labeling_jobs_flattened_error(): - client = JobServiceClient( - credentials=credentials.AnonymousCredentials(), - ) + client = JobServiceClient(credentials=credentials.AnonymousCredentials(),) # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): client.list_data_labeling_jobs( - job_service.ListDataLabelingJobsRequest(), - parent="parent_value", + job_service.ListDataLabelingJobsRequest(), parent="parent_value", ) @pytest.mark.asyncio async def test_list_data_labeling_jobs_flattened_async(): - client = JobServiceAsyncClient( - credentials=credentials.AnonymousCredentials(), - ) + client = JobServiceAsyncClient(credentials=credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -2578,9 +2337,7 @@ async def test_list_data_labeling_jobs_flattened_async(): ) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - response = await client.list_data_labeling_jobs( - parent="parent_value", - ) + response = await client.list_data_labeling_jobs(parent="parent_value",) # Establish that the underlying call was made with the expected # request object values. @@ -2592,23 +2349,18 @@ async def test_list_data_labeling_jobs_flattened_async(): @pytest.mark.asyncio async def test_list_data_labeling_jobs_flattened_error_async(): - client = JobServiceAsyncClient( - credentials=credentials.AnonymousCredentials(), - ) + client = JobServiceAsyncClient(credentials=credentials.AnonymousCredentials(),) # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): await client.list_data_labeling_jobs( - job_service.ListDataLabelingJobsRequest(), - parent="parent_value", + job_service.ListDataLabelingJobsRequest(), parent="parent_value", ) def test_list_data_labeling_jobs_pager(): - client = JobServiceClient( - credentials=credentials.AnonymousCredentials, - ) + client = JobServiceClient(credentials=credentials.AnonymousCredentials,) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -2625,13 +2377,10 @@ def test_list_data_labeling_jobs_pager(): next_page_token="abc", ), job_service.ListDataLabelingJobsResponse( - data_labeling_jobs=[], - next_page_token="def", + data_labeling_jobs=[], next_page_token="def", ), job_service.ListDataLabelingJobsResponse( - data_labeling_jobs=[ - data_labeling_job.DataLabelingJob(), - ], + data_labeling_jobs=[data_labeling_job.DataLabelingJob(),], next_page_token="ghi", ), job_service.ListDataLabelingJobsResponse( @@ -2657,9 +2406,7 @@ def test_list_data_labeling_jobs_pager(): def test_list_data_labeling_jobs_pages(): - client = JobServiceClient( - credentials=credentials.AnonymousCredentials, - ) + client = JobServiceClient(credentials=credentials.AnonymousCredentials,) # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object( @@ -2676,13 +2423,10 @@ def test_list_data_labeling_jobs_pages(): next_page_token="abc", ), job_service.ListDataLabelingJobsResponse( - data_labeling_jobs=[], - next_page_token="def", + data_labeling_jobs=[], next_page_token="def", ), job_service.ListDataLabelingJobsResponse( - data_labeling_jobs=[ - data_labeling_job.DataLabelingJob(), - ], + data_labeling_jobs=[data_labeling_job.DataLabelingJob(),], next_page_token="ghi", ), job_service.ListDataLabelingJobsResponse( @@ -2700,9 +2444,7 @@ def test_list_data_labeling_jobs_pages(): @pytest.mark.asyncio async def test_list_data_labeling_jobs_async_pager(): - client = JobServiceAsyncClient( - credentials=credentials.AnonymousCredentials, - ) + client = JobServiceAsyncClient(credentials=credentials.AnonymousCredentials,) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -2721,13 +2463,10 @@ async def test_list_data_labeling_jobs_async_pager(): next_page_token="abc", ), job_service.ListDataLabelingJobsResponse( - data_labeling_jobs=[], - next_page_token="def", + data_labeling_jobs=[], next_page_token="def", ), job_service.ListDataLabelingJobsResponse( - data_labeling_jobs=[ - data_labeling_job.DataLabelingJob(), - ], + data_labeling_jobs=[data_labeling_job.DataLabelingJob(),], next_page_token="ghi", ), job_service.ListDataLabelingJobsResponse( @@ -2738,9 +2477,7 @@ async def test_list_data_labeling_jobs_async_pager(): ), RuntimeError, ) - async_pager = await client.list_data_labeling_jobs( - request={}, - ) + async_pager = await client.list_data_labeling_jobs(request={},) assert async_pager.next_page_token == "abc" responses = [] async for response in async_pager: @@ -2752,9 +2489,7 @@ async def test_list_data_labeling_jobs_async_pager(): @pytest.mark.asyncio async def test_list_data_labeling_jobs_async_pages(): - client = JobServiceAsyncClient( - credentials=credentials.AnonymousCredentials, - ) + client = JobServiceAsyncClient(credentials=credentials.AnonymousCredentials,) # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object( @@ -2773,13 +2508,10 @@ async def test_list_data_labeling_jobs_async_pages(): next_page_token="abc", ), job_service.ListDataLabelingJobsResponse( - data_labeling_jobs=[], - next_page_token="def", + data_labeling_jobs=[], next_page_token="def", ), job_service.ListDataLabelingJobsResponse( - data_labeling_jobs=[ - data_labeling_job.DataLabelingJob(), - ], + data_labeling_jobs=[data_labeling_job.DataLabelingJob(),], next_page_token="ghi", ), job_service.ListDataLabelingJobsResponse( @@ -2801,8 +2533,7 @@ def test_delete_data_labeling_job( transport: str = "grpc", request_type=job_service.DeleteDataLabelingJobRequest ): client = JobServiceClient( - credentials=credentials.AnonymousCredentials(), - transport=transport, + credentials=credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -2838,8 +2569,7 @@ async def test_delete_data_labeling_job_async( request_type=job_service.DeleteDataLabelingJobRequest, ): client = JobServiceAsyncClient( - credentials=credentials.AnonymousCredentials(), - transport=transport, + credentials=credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -2873,9 +2603,7 @@ async def test_delete_data_labeling_job_async_from_dict(): def test_delete_data_labeling_job_field_headers(): - client = JobServiceClient( - credentials=credentials.AnonymousCredentials(), - ) + client = JobServiceClient(credentials=credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. @@ -2897,17 +2625,12 @@ def test_delete_data_labeling_job_field_headers(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "name=name/value", - ) in kw["metadata"] + assert ("x-goog-request-params", "name=name/value",) in kw["metadata"] @pytest.mark.asyncio async def test_delete_data_labeling_job_field_headers_async(): - client = JobServiceAsyncClient( - credentials=credentials.AnonymousCredentials(), - ) + client = JobServiceAsyncClient(credentials=credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. @@ -2931,16 +2654,11 @@ async def test_delete_data_labeling_job_field_headers_async(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "name=name/value", - ) in kw["metadata"] + assert ("x-goog-request-params", "name=name/value",) in kw["metadata"] def test_delete_data_labeling_job_flattened(): - client = JobServiceClient( - credentials=credentials.AnonymousCredentials(), - ) + client = JobServiceClient(credentials=credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -2951,9 +2669,7 @@ def test_delete_data_labeling_job_flattened(): # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - client.delete_data_labeling_job( - name="name_value", - ) + client.delete_data_labeling_job(name="name_value",) # Establish that the underlying call was made with the expected # request object values. 
@@ -2964,24 +2680,19 @@ def test_delete_data_labeling_job_flattened(): def test_delete_data_labeling_job_flattened_error(): - client = JobServiceClient( - credentials=credentials.AnonymousCredentials(), - ) + client = JobServiceClient(credentials=credentials.AnonymousCredentials(),) # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): client.delete_data_labeling_job( - job_service.DeleteDataLabelingJobRequest(), - name="name_value", + job_service.DeleteDataLabelingJobRequest(), name="name_value", ) @pytest.mark.asyncio async def test_delete_data_labeling_job_flattened_async(): - client = JobServiceAsyncClient( - credentials=credentials.AnonymousCredentials(), - ) + client = JobServiceAsyncClient(credentials=credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -2995,9 +2706,7 @@ async def test_delete_data_labeling_job_flattened_async(): ) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - response = await client.delete_data_labeling_job( - name="name_value", - ) + response = await client.delete_data_labeling_job(name="name_value",) # Establish that the underlying call was made with the expected # request object values. @@ -3009,16 +2718,13 @@ async def test_delete_data_labeling_job_flattened_async(): @pytest.mark.asyncio async def test_delete_data_labeling_job_flattened_error_async(): - client = JobServiceAsyncClient( - credentials=credentials.AnonymousCredentials(), - ) + client = JobServiceAsyncClient(credentials=credentials.AnonymousCredentials(),) # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): await client.delete_data_labeling_job( - job_service.DeleteDataLabelingJobRequest(), - name="name_value", + job_service.DeleteDataLabelingJobRequest(), name="name_value", ) @@ -3026,8 +2732,7 @@ def test_cancel_data_labeling_job( transport: str = "grpc", request_type=job_service.CancelDataLabelingJobRequest ): client = JobServiceClient( - credentials=credentials.AnonymousCredentials(), - transport=transport, + credentials=credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -3063,8 +2768,7 @@ async def test_cancel_data_labeling_job_async( request_type=job_service.CancelDataLabelingJobRequest, ): client = JobServiceAsyncClient( - credentials=credentials.AnonymousCredentials(), - transport=transport, + credentials=credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -3096,9 +2800,7 @@ async def test_cancel_data_labeling_job_async_from_dict(): def test_cancel_data_labeling_job_field_headers(): - client = JobServiceClient( - credentials=credentials.AnonymousCredentials(), - ) + client = JobServiceClient(credentials=credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. @@ -3120,17 +2822,12 @@ def test_cancel_data_labeling_job_field_headers(): # Establish that the field header was sent. 
_, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "name=name/value", - ) in kw["metadata"] + assert ("x-goog-request-params", "name=name/value",) in kw["metadata"] @pytest.mark.asyncio async def test_cancel_data_labeling_job_field_headers_async(): - client = JobServiceAsyncClient( - credentials=credentials.AnonymousCredentials(), - ) + client = JobServiceAsyncClient(credentials=credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. @@ -3152,16 +2849,11 @@ async def test_cancel_data_labeling_job_field_headers_async(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "name=name/value", - ) in kw["metadata"] + assert ("x-goog-request-params", "name=name/value",) in kw["metadata"] def test_cancel_data_labeling_job_flattened(): - client = JobServiceClient( - credentials=credentials.AnonymousCredentials(), - ) + client = JobServiceClient(credentials=credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -3172,9 +2864,7 @@ def test_cancel_data_labeling_job_flattened(): # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - client.cancel_data_labeling_job( - name="name_value", - ) + client.cancel_data_labeling_job(name="name_value",) # Establish that the underlying call was made with the expected # request object values. @@ -3185,24 +2875,19 @@ def test_cancel_data_labeling_job_flattened(): def test_cancel_data_labeling_job_flattened_error(): - client = JobServiceClient( - credentials=credentials.AnonymousCredentials(), - ) + client = JobServiceClient(credentials=credentials.AnonymousCredentials(),) # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): client.cancel_data_labeling_job( - job_service.CancelDataLabelingJobRequest(), - name="name_value", + job_service.CancelDataLabelingJobRequest(), name="name_value", ) @pytest.mark.asyncio async def test_cancel_data_labeling_job_flattened_async(): - client = JobServiceAsyncClient( - credentials=credentials.AnonymousCredentials(), - ) + client = JobServiceAsyncClient(credentials=credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -3214,9 +2899,7 @@ async def test_cancel_data_labeling_job_flattened_async(): call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - response = await client.cancel_data_labeling_job( - name="name_value", - ) + response = await client.cancel_data_labeling_job(name="name_value",) # Establish that the underlying call was made with the expected # request object values. @@ -3228,16 +2911,13 @@ async def test_cancel_data_labeling_job_flattened_async(): @pytest.mark.asyncio async def test_cancel_data_labeling_job_flattened_error_async(): - client = JobServiceAsyncClient( - credentials=credentials.AnonymousCredentials(), - ) + client = JobServiceAsyncClient(credentials=credentials.AnonymousCredentials(),) # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): await client.cancel_data_labeling_job( - job_service.CancelDataLabelingJobRequest(), - name="name_value", + job_service.CancelDataLabelingJobRequest(), name="name_value", ) @@ -3246,8 +2926,7 @@ def test_create_hyperparameter_tuning_job( request_type=job_service.CreateHyperparameterTuningJobRequest, ): client = JobServiceClient( - credentials=credentials.AnonymousCredentials(), - transport=transport, + credentials=credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -3303,8 +2982,7 @@ async def test_create_hyperparameter_tuning_job_async( request_type=job_service.CreateHyperparameterTuningJobRequest, ): client = JobServiceAsyncClient( - credentials=credentials.AnonymousCredentials(), - transport=transport, + credentials=credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -3357,9 +3035,7 @@ async def test_create_hyperparameter_tuning_job_async_from_dict(): def test_create_hyperparameter_tuning_job_field_headers(): - client = JobServiceClient( - credentials=credentials.AnonymousCredentials(), - ) + client = JobServiceClient(credentials=credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. @@ -3381,17 +3057,12 @@ def test_create_hyperparameter_tuning_job_field_headers(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "parent=parent/value", - ) in kw["metadata"] + assert ("x-goog-request-params", "parent=parent/value",) in kw["metadata"] @pytest.mark.asyncio async def test_create_hyperparameter_tuning_job_field_headers_async(): - client = JobServiceAsyncClient( - credentials=credentials.AnonymousCredentials(), - ) + client = JobServiceAsyncClient(credentials=credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. @@ -3415,16 +3086,11 @@ async def test_create_hyperparameter_tuning_job_field_headers_async(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "parent=parent/value", - ) in kw["metadata"] + assert ("x-goog-request-params", "parent=parent/value",) in kw["metadata"] def test_create_hyperparameter_tuning_job_flattened(): - client = JobServiceClient( - credentials=credentials.AnonymousCredentials(), - ) + client = JobServiceClient(credentials=credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -3457,9 +3123,7 @@ def test_create_hyperparameter_tuning_job_flattened(): def test_create_hyperparameter_tuning_job_flattened_error(): - client = JobServiceClient( - credentials=credentials.AnonymousCredentials(), - ) + client = JobServiceClient(credentials=credentials.AnonymousCredentials(),) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -3475,9 +3139,7 @@ def test_create_hyperparameter_tuning_job_flattened_error(): @pytest.mark.asyncio async def test_create_hyperparameter_tuning_job_flattened_async(): - client = JobServiceAsyncClient( - credentials=credentials.AnonymousCredentials(), - ) + client = JobServiceAsyncClient(credentials=credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object( @@ -3514,9 +3176,7 @@ async def test_create_hyperparameter_tuning_job_flattened_async(): @pytest.mark.asyncio async def test_create_hyperparameter_tuning_job_flattened_error_async(): - client = JobServiceAsyncClient( - credentials=credentials.AnonymousCredentials(), - ) + client = JobServiceAsyncClient(credentials=credentials.AnonymousCredentials(),) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -3534,8 +3194,7 @@ def test_get_hyperparameter_tuning_job( transport: str = "grpc", request_type=job_service.GetHyperparameterTuningJobRequest ): client = JobServiceClient( - credentials=credentials.AnonymousCredentials(), - transport=transport, + credentials=credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -3591,8 +3250,7 @@ async def test_get_hyperparameter_tuning_job_async( request_type=job_service.GetHyperparameterTuningJobRequest, ): client = JobServiceAsyncClient( - credentials=credentials.AnonymousCredentials(), - transport=transport, + credentials=credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -3645,9 +3303,7 @@ async def test_get_hyperparameter_tuning_job_async_from_dict(): def test_get_hyperparameter_tuning_job_field_headers(): - client = JobServiceClient( - credentials=credentials.AnonymousCredentials(), - ) + client = JobServiceClient(credentials=credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. @@ -3669,17 +3325,12 @@ def test_get_hyperparameter_tuning_job_field_headers(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "name=name/value", - ) in kw["metadata"] + assert ("x-goog-request-params", "name=name/value",) in kw["metadata"] @pytest.mark.asyncio async def test_get_hyperparameter_tuning_job_field_headers_async(): - client = JobServiceAsyncClient( - credentials=credentials.AnonymousCredentials(), - ) + client = JobServiceAsyncClient(credentials=credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. @@ -3703,16 +3354,11 @@ async def test_get_hyperparameter_tuning_job_field_headers_async(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "name=name/value", - ) in kw["metadata"] + assert ("x-goog-request-params", "name=name/value",) in kw["metadata"] def test_get_hyperparameter_tuning_job_flattened(): - client = JobServiceClient( - credentials=credentials.AnonymousCredentials(), - ) + client = JobServiceClient(credentials=credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -3723,9 +3369,7 @@ def test_get_hyperparameter_tuning_job_flattened(): # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - client.get_hyperparameter_tuning_job( - name="name_value", - ) + client.get_hyperparameter_tuning_job(name="name_value",) # Establish that the underlying call was made with the expected # request object values. 
@@ -3736,24 +3380,19 @@ def test_get_hyperparameter_tuning_job_flattened(): def test_get_hyperparameter_tuning_job_flattened_error(): - client = JobServiceClient( - credentials=credentials.AnonymousCredentials(), - ) + client = JobServiceClient(credentials=credentials.AnonymousCredentials(),) # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): client.get_hyperparameter_tuning_job( - job_service.GetHyperparameterTuningJobRequest(), - name="name_value", + job_service.GetHyperparameterTuningJobRequest(), name="name_value", ) @pytest.mark.asyncio async def test_get_hyperparameter_tuning_job_flattened_async(): - client = JobServiceAsyncClient( - credentials=credentials.AnonymousCredentials(), - ) + client = JobServiceAsyncClient(credentials=credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -3767,9 +3406,7 @@ async def test_get_hyperparameter_tuning_job_flattened_async(): ) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - response = await client.get_hyperparameter_tuning_job( - name="name_value", - ) + response = await client.get_hyperparameter_tuning_job(name="name_value",) # Establish that the underlying call was made with the expected # request object values. @@ -3781,16 +3418,13 @@ async def test_get_hyperparameter_tuning_job_flattened_async(): @pytest.mark.asyncio async def test_get_hyperparameter_tuning_job_flattened_error_async(): - client = JobServiceAsyncClient( - credentials=credentials.AnonymousCredentials(), - ) + client = JobServiceAsyncClient(credentials=credentials.AnonymousCredentials(),) # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): await client.get_hyperparameter_tuning_job( - job_service.GetHyperparameterTuningJobRequest(), - name="name_value", + job_service.GetHyperparameterTuningJobRequest(), name="name_value", ) @@ -3799,8 +3433,7 @@ def test_list_hyperparameter_tuning_jobs( request_type=job_service.ListHyperparameterTuningJobsRequest, ): client = JobServiceClient( - credentials=credentials.AnonymousCredentials(), - transport=transport, + credentials=credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -3841,8 +3474,7 @@ async def test_list_hyperparameter_tuning_jobs_async( request_type=job_service.ListHyperparameterTuningJobsRequest, ): client = JobServiceAsyncClient( - credentials=credentials.AnonymousCredentials(), - transport=transport, + credentials=credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -3880,9 +3512,7 @@ async def test_list_hyperparameter_tuning_jobs_async_from_dict(): def test_list_hyperparameter_tuning_jobs_field_headers(): - client = JobServiceClient( - credentials=credentials.AnonymousCredentials(), - ) + client = JobServiceClient(credentials=credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. @@ -3904,17 +3534,12 @@ def test_list_hyperparameter_tuning_jobs_field_headers(): # Establish that the field header was sent. 
_, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "parent=parent/value", - ) in kw["metadata"] + assert ("x-goog-request-params", "parent=parent/value",) in kw["metadata"] @pytest.mark.asyncio async def test_list_hyperparameter_tuning_jobs_field_headers_async(): - client = JobServiceAsyncClient( - credentials=credentials.AnonymousCredentials(), - ) + client = JobServiceAsyncClient(credentials=credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. @@ -3938,16 +3563,11 @@ async def test_list_hyperparameter_tuning_jobs_field_headers_async(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "parent=parent/value", - ) in kw["metadata"] + assert ("x-goog-request-params", "parent=parent/value",) in kw["metadata"] def test_list_hyperparameter_tuning_jobs_flattened(): - client = JobServiceClient( - credentials=credentials.AnonymousCredentials(), - ) + client = JobServiceClient(credentials=credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -3958,9 +3578,7 @@ def test_list_hyperparameter_tuning_jobs_flattened(): # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - client.list_hyperparameter_tuning_jobs( - parent="parent_value", - ) + client.list_hyperparameter_tuning_jobs(parent="parent_value",) # Establish that the underlying call was made with the expected # request object values. @@ -3971,24 +3589,19 @@ def test_list_hyperparameter_tuning_jobs_flattened(): def test_list_hyperparameter_tuning_jobs_flattened_error(): - client = JobServiceClient( - credentials=credentials.AnonymousCredentials(), - ) + client = JobServiceClient(credentials=credentials.AnonymousCredentials(),) # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): client.list_hyperparameter_tuning_jobs( - job_service.ListHyperparameterTuningJobsRequest(), - parent="parent_value", + job_service.ListHyperparameterTuningJobsRequest(), parent="parent_value", ) @pytest.mark.asyncio async def test_list_hyperparameter_tuning_jobs_flattened_async(): - client = JobServiceAsyncClient( - credentials=credentials.AnonymousCredentials(), - ) + client = JobServiceAsyncClient(credentials=credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -4002,9 +3615,7 @@ async def test_list_hyperparameter_tuning_jobs_flattened_async(): ) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - response = await client.list_hyperparameter_tuning_jobs( - parent="parent_value", - ) + response = await client.list_hyperparameter_tuning_jobs(parent="parent_value",) # Establish that the underlying call was made with the expected # request object values. @@ -4016,23 +3627,18 @@ async def test_list_hyperparameter_tuning_jobs_flattened_async(): @pytest.mark.asyncio async def test_list_hyperparameter_tuning_jobs_flattened_error_async(): - client = JobServiceAsyncClient( - credentials=credentials.AnonymousCredentials(), - ) + client = JobServiceAsyncClient(credentials=credentials.AnonymousCredentials(),) # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): await client.list_hyperparameter_tuning_jobs( - job_service.ListHyperparameterTuningJobsRequest(), - parent="parent_value", + job_service.ListHyperparameterTuningJobsRequest(), parent="parent_value", ) -def test_list_hyperparameter_tuning_jobs_pager(): - client = JobServiceClient( - credentials=credentials.AnonymousCredentials, - ) +def test_list_hyperparameter_tuning_jobs_pager(): + client = JobServiceClient(credentials=credentials.AnonymousCredentials,) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -4049,8 +3655,7 @@ def test_list_hyperparameter_tuning_jobs_pager(): next_page_token="abc", ), job_service.ListHyperparameterTuningJobsResponse( - hyperparameter_tuning_jobs=[], - next_page_token="def", + hyperparameter_tuning_jobs=[], next_page_token="def", ), job_service.ListHyperparameterTuningJobsResponse( hyperparameter_tuning_jobs=[ @@ -4084,9 +3689,7 @@ def test_list_hyperparameter_tuning_jobs_pager(): def test_list_hyperparameter_tuning_jobs_pages(): - client = JobServiceClient( - credentials=credentials.AnonymousCredentials, - ) + client = JobServiceClient(credentials=credentials.AnonymousCredentials,) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -4103,8 +3706,7 @@ def test_list_hyperparameter_tuning_jobs_pages(): next_page_token="abc", ), job_service.ListHyperparameterTuningJobsResponse( - hyperparameter_tuning_jobs=[], - next_page_token="def", + hyperparameter_tuning_jobs=[], next_page_token="def", ), job_service.ListHyperparameterTuningJobsResponse( hyperparameter_tuning_jobs=[ @@ -4127,9 +3729,7 @@ def test_list_hyperparameter_tuning_jobs_pages(): @pytest.mark.asyncio async def test_list_hyperparameter_tuning_jobs_async_pager(): - client = JobServiceAsyncClient( - credentials=credentials.AnonymousCredentials, - ) + client = JobServiceAsyncClient(credentials=credentials.AnonymousCredentials,) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -4148,8 +3748,7 @@ async def test_list_hyperparameter_tuning_jobs_async_pager(): next_page_token="abc", ), job_service.ListHyperparameterTuningJobsResponse( - hyperparameter_tuning_jobs=[], - next_page_token="def", + hyperparameter_tuning_jobs=[], next_page_token="def", ), job_service.ListHyperparameterTuningJobsResponse( hyperparameter_tuning_jobs=[ @@ -4165,9 +3764,7 @@ async def test_list_hyperparameter_tuning_jobs_async_pager(): ), RuntimeError, ) - async_pager = await client.list_hyperparameter_tuning_jobs( - request={}, - ) + async_pager = await client.list_hyperparameter_tuning_jobs(request={},) assert async_pager.next_page_token == "abc" responses = [] async for response in async_pager: @@ -4182,9 +3779,7 @@ async def test_list_hyperparameter_tuning_jobs_async_pager(): @pytest.mark.asyncio async def test_list_hyperparameter_tuning_jobs_async_pages(): - client = JobServiceAsyncClient( - credentials=credentials.AnonymousCredentials, - ) + client = JobServiceAsyncClient(credentials=credentials.AnonymousCredentials,) # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object( @@ -4203,8 +3798,7 @@ async def test_list_hyperparameter_tuning_jobs_async_pages(): next_page_token="abc", ), job_service.ListHyperparameterTuningJobsResponse( - hyperparameter_tuning_jobs=[], - next_page_token="def", + hyperparameter_tuning_jobs=[], next_page_token="def", ), job_service.ListHyperparameterTuningJobsResponse( hyperparameter_tuning_jobs=[ @@ -4234,8 +3828,7 @@ def test_delete_hyperparameter_tuning_job( request_type=job_service.DeleteHyperparameterTuningJobRequest, ): client = JobServiceClient( - credentials=credentials.AnonymousCredentials(), - transport=transport, + credentials=credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -4271,8 +3864,7 @@ async def test_delete_hyperparameter_tuning_job_async( request_type=job_service.DeleteHyperparameterTuningJobRequest, ): client = JobServiceAsyncClient( - credentials=credentials.AnonymousCredentials(), - transport=transport, + credentials=credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -4306,9 +3898,7 @@ async def test_delete_hyperparameter_tuning_job_async_from_dict(): def test_delete_hyperparameter_tuning_job_field_headers(): - client = JobServiceClient( - credentials=credentials.AnonymousCredentials(), - ) + client = JobServiceClient(credentials=credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. @@ -4330,17 +3920,12 @@ def test_delete_hyperparameter_tuning_job_field_headers(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "name=name/value", - ) in kw["metadata"] + assert ("x-goog-request-params", "name=name/value",) in kw["metadata"] @pytest.mark.asyncio async def test_delete_hyperparameter_tuning_job_field_headers_async(): - client = JobServiceAsyncClient( - credentials=credentials.AnonymousCredentials(), - ) + client = JobServiceAsyncClient(credentials=credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. @@ -4364,16 +3949,11 @@ async def test_delete_hyperparameter_tuning_job_field_headers_async(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "name=name/value", - ) in kw["metadata"] + assert ("x-goog-request-params", "name=name/value",) in kw["metadata"] def test_delete_hyperparameter_tuning_job_flattened(): - client = JobServiceClient( - credentials=credentials.AnonymousCredentials(), - ) + client = JobServiceClient(credentials=credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -4384,9 +3964,7 @@ def test_delete_hyperparameter_tuning_job_flattened(): # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - client.delete_hyperparameter_tuning_job( - name="name_value", - ) + client.delete_hyperparameter_tuning_job(name="name_value",) # Establish that the underlying call was made with the expected # request object values. 
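
Note on the pattern being reformatted: every *_flattened / *_flattened_error pair in these hunks pins down the same GAPIC calling convention, where each RPC method accepts either a prebuilt request object or flattened keyword arguments, and mixing the two raises ValueError. A minimal sketch of that contract, outside the patch (import paths assumed from this package's layout; anonymous credentials as in the tests, and only the error path is exercised, so no RPC is actually sent):

    from google.auth import credentials
    from google.cloud.aiplatform_v1beta1.services.job_service import JobServiceClient
    from google.cloud.aiplatform_v1beta1.types import job_service

    client = JobServiceClient(credentials=credentials.AnonymousCredentials())

    # A request object OR flattened keywords is accepted on its own;
    # supplying both is rejected client-side, before any network call:
    try:
        client.delete_hyperparameter_tuning_job(
            job_service.DeleteHyperparameterTuningJobRequest(), name="name_value"
        )
    except ValueError:
        pass  # exactly what test_delete_hyperparameter_tuning_job_flattened_error asserts
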
@@ -4397,24 +3975,19 @@ def test_delete_hyperparameter_tuning_job_flattened(): def test_delete_hyperparameter_tuning_job_flattened_error(): - client = JobServiceClient( - credentials=credentials.AnonymousCredentials(), - ) + client = JobServiceClient(credentials=credentials.AnonymousCredentials(),) # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): client.delete_hyperparameter_tuning_job( - job_service.DeleteHyperparameterTuningJobRequest(), - name="name_value", + job_service.DeleteHyperparameterTuningJobRequest(), name="name_value", ) @pytest.mark.asyncio async def test_delete_hyperparameter_tuning_job_flattened_async(): - client = JobServiceAsyncClient( - credentials=credentials.AnonymousCredentials(), - ) + client = JobServiceAsyncClient(credentials=credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -4428,9 +4001,7 @@ async def test_delete_hyperparameter_tuning_job_flattened_async(): ) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - response = await client.delete_hyperparameter_tuning_job( - name="name_value", - ) + response = await client.delete_hyperparameter_tuning_job(name="name_value",) # Establish that the underlying call was made with the expected # request object values. @@ -4442,16 +4013,13 @@ async def test_delete_hyperparameter_tuning_job_flattened_async(): @pytest.mark.asyncio async def test_delete_hyperparameter_tuning_job_flattened_error_async(): - client = JobServiceAsyncClient( - credentials=credentials.AnonymousCredentials(), - ) + client = JobServiceAsyncClient(credentials=credentials.AnonymousCredentials(),) # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): await client.delete_hyperparameter_tuning_job( - job_service.DeleteHyperparameterTuningJobRequest(), - name="name_value", + job_service.DeleteHyperparameterTuningJobRequest(), name="name_value", ) @@ -4460,8 +4028,7 @@ def test_cancel_hyperparameter_tuning_job( request_type=job_service.CancelHyperparameterTuningJobRequest, ): client = JobServiceClient( - credentials=credentials.AnonymousCredentials(), - transport=transport, + credentials=credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -4497,8 +4064,7 @@ async def test_cancel_hyperparameter_tuning_job_async( request_type=job_service.CancelHyperparameterTuningJobRequest, ): client = JobServiceAsyncClient( - credentials=credentials.AnonymousCredentials(), - transport=transport, + credentials=credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -4530,9 +4096,7 @@ async def test_cancel_hyperparameter_tuning_job_async_from_dict(): def test_cancel_hyperparameter_tuning_job_field_headers(): - client = JobServiceClient( - credentials=credentials.AnonymousCredentials(), - ) + client = JobServiceClient(credentials=credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. @@ -4554,17 +4118,12 @@ def test_cancel_hyperparameter_tuning_job_field_headers(): # Establish that the field header was sent. 
_, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "name=name/value", - ) in kw["metadata"] + assert ("x-goog-request-params", "name=name/value",) in kw["metadata"] @pytest.mark.asyncio async def test_cancel_hyperparameter_tuning_job_field_headers_async(): - client = JobServiceAsyncClient( - credentials=credentials.AnonymousCredentials(), - ) + client = JobServiceAsyncClient(credentials=credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. @@ -4586,16 +4145,11 @@ async def test_cancel_hyperparameter_tuning_job_field_headers_async(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "name=name/value", - ) in kw["metadata"] + assert ("x-goog-request-params", "name=name/value",) in kw["metadata"] def test_cancel_hyperparameter_tuning_job_flattened(): - client = JobServiceClient( - credentials=credentials.AnonymousCredentials(), - ) + client = JobServiceClient(credentials=credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -4606,9 +4160,7 @@ def test_cancel_hyperparameter_tuning_job_flattened(): # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - client.cancel_hyperparameter_tuning_job( - name="name_value", - ) + client.cancel_hyperparameter_tuning_job(name="name_value",) # Establish that the underlying call was made with the expected # request object values. @@ -4619,24 +4171,19 @@ def test_cancel_hyperparameter_tuning_job_flattened(): def test_cancel_hyperparameter_tuning_job_flattened_error(): - client = JobServiceClient( - credentials=credentials.AnonymousCredentials(), - ) + client = JobServiceClient(credentials=credentials.AnonymousCredentials(),) # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): client.cancel_hyperparameter_tuning_job( - job_service.CancelHyperparameterTuningJobRequest(), - name="name_value", + job_service.CancelHyperparameterTuningJobRequest(), name="name_value", ) @pytest.mark.asyncio async def test_cancel_hyperparameter_tuning_job_flattened_async(): - client = JobServiceAsyncClient( - credentials=credentials.AnonymousCredentials(), - ) + client = JobServiceAsyncClient(credentials=credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -4648,9 +4195,7 @@ async def test_cancel_hyperparameter_tuning_job_flattened_async(): call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - response = await client.cancel_hyperparameter_tuning_job( - name="name_value", - ) + response = await client.cancel_hyperparameter_tuning_job(name="name_value",) # Establish that the underlying call was made with the expected # request object values. @@ -4662,16 +4207,13 @@ async def test_cancel_hyperparameter_tuning_job_flattened_async(): @pytest.mark.asyncio async def test_cancel_hyperparameter_tuning_job_flattened_error_async(): - client = JobServiceAsyncClient( - credentials=credentials.AnonymousCredentials(), - ) + client = JobServiceAsyncClient(credentials=credentials.AnonymousCredentials(),) # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): await client.cancel_hyperparameter_tuning_job( - job_service.CancelHyperparameterTuningJobRequest(), - name="name_value", + job_service.CancelHyperparameterTuningJobRequest(), name="name_value", ) @@ -4679,8 +4221,7 @@ def test_create_batch_prediction_job( transport: str = "grpc", request_type=job_service.CreateBatchPredictionJobRequest ): client = JobServiceClient( - credentials=credentials.AnonymousCredentials(), - transport=transport, + credentials=credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -4733,8 +4274,7 @@ async def test_create_batch_prediction_job_async( request_type=job_service.CreateBatchPredictionJobRequest, ): client = JobServiceAsyncClient( - credentials=credentials.AnonymousCredentials(), - transport=transport, + credentials=credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -4784,9 +4324,7 @@ async def test_create_batch_prediction_job_async_from_dict(): def test_create_batch_prediction_job_field_headers(): - client = JobServiceClient( - credentials=credentials.AnonymousCredentials(), - ) + client = JobServiceClient(credentials=credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. @@ -4808,17 +4346,12 @@ def test_create_batch_prediction_job_field_headers(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "parent=parent/value", - ) in kw["metadata"] + assert ("x-goog-request-params", "parent=parent/value",) in kw["metadata"] @pytest.mark.asyncio async def test_create_batch_prediction_job_field_headers_async(): - client = JobServiceAsyncClient( - credentials=credentials.AnonymousCredentials(), - ) + client = JobServiceAsyncClient(credentials=credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. @@ -4842,16 +4375,11 @@ async def test_create_batch_prediction_job_field_headers_async(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "parent=parent/value", - ) in kw["metadata"] + assert ("x-goog-request-params", "parent=parent/value",) in kw["metadata"] def test_create_batch_prediction_job_flattened(): - client = JobServiceClient( - credentials=credentials.AnonymousCredentials(), - ) + client = JobServiceClient(credentials=credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -4884,9 +4412,7 @@ def test_create_batch_prediction_job_flattened(): def test_create_batch_prediction_job_flattened_error(): - client = JobServiceClient( - credentials=credentials.AnonymousCredentials(), - ) + client = JobServiceClient(credentials=credentials.AnonymousCredentials(),) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -4902,9 +4428,7 @@ def test_create_batch_prediction_job_flattened_error(): @pytest.mark.asyncio async def test_create_batch_prediction_job_flattened_async(): - client = JobServiceAsyncClient( - credentials=credentials.AnonymousCredentials(), - ) + client = JobServiceAsyncClient(credentials=credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object( @@ -4941,9 +4465,7 @@ async def test_create_batch_prediction_job_flattened_async(): @pytest.mark.asyncio async def test_create_batch_prediction_job_flattened_error_async(): - client = JobServiceAsyncClient( - credentials=credentials.AnonymousCredentials(), - ) + client = JobServiceAsyncClient(credentials=credentials.AnonymousCredentials(),) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -4961,8 +4483,7 @@ def test_get_batch_prediction_job( transport: str = "grpc", request_type=job_service.GetBatchPredictionJobRequest ): client = JobServiceClient( - credentials=credentials.AnonymousCredentials(), - transport=transport, + credentials=credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -5015,8 +4536,7 @@ async def test_get_batch_prediction_job_async( request_type=job_service.GetBatchPredictionJobRequest, ): client = JobServiceAsyncClient( - credentials=credentials.AnonymousCredentials(), - transport=transport, + credentials=credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -5066,9 +4586,7 @@ async def test_get_batch_prediction_job_async_from_dict(): def test_get_batch_prediction_job_field_headers(): - client = JobServiceClient( - credentials=credentials.AnonymousCredentials(), - ) + client = JobServiceClient(credentials=credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. @@ -5090,17 +4608,12 @@ def test_get_batch_prediction_job_field_headers(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "name=name/value", - ) in kw["metadata"] + assert ("x-goog-request-params", "name=name/value",) in kw["metadata"] @pytest.mark.asyncio async def test_get_batch_prediction_job_field_headers_async(): - client = JobServiceAsyncClient( - credentials=credentials.AnonymousCredentials(), - ) + client = JobServiceAsyncClient(credentials=credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. @@ -5124,16 +4637,11 @@ async def test_get_batch_prediction_job_field_headers_async(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "name=name/value", - ) in kw["metadata"] + assert ("x-goog-request-params", "name=name/value",) in kw["metadata"] def test_get_batch_prediction_job_flattened(): - client = JobServiceClient( - credentials=credentials.AnonymousCredentials(), - ) + client = JobServiceClient(credentials=credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -5144,9 +4652,7 @@ def test_get_batch_prediction_job_flattened(): # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - client.get_batch_prediction_job( - name="name_value", - ) + client.get_batch_prediction_job(name="name_value",) # Establish that the underlying call was made with the expected # request object values. 
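
The mechanical change repeated in every hunk here is the same: a constructor or method call that fits within black's default 88-character line length is collapsed onto a single line, with the generator's trailing comma left in place. Schematically (a distilled example, not a hunk from this patch):

    # Before: the call is hand-wrapped, one argument per line.
    client = JobServiceClient(
        credentials=credentials.AnonymousCredentials(),
    )
    # After: collapsed onto one line, trailing comma retained.
    client = JobServiceClient(credentials=credentials.AnonymousCredentials(),)

The retained trailing comma is characteristic of black releases from this period; newer black (20.8b0 and later) instead treats a trailing comma as a signal to keep the call exploded. The exact black version is an inference from the output style, not something stated in the patch.
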
@@ -5157,24 +4663,19 @@ def test_get_batch_prediction_job_flattened(): def test_get_batch_prediction_job_flattened_error(): - client = JobServiceClient( - credentials=credentials.AnonymousCredentials(), - ) + client = JobServiceClient(credentials=credentials.AnonymousCredentials(),) # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): client.get_batch_prediction_job( - job_service.GetBatchPredictionJobRequest(), - name="name_value", + job_service.GetBatchPredictionJobRequest(), name="name_value", ) @pytest.mark.asyncio async def test_get_batch_prediction_job_flattened_async(): - client = JobServiceAsyncClient( - credentials=credentials.AnonymousCredentials(), - ) + client = JobServiceAsyncClient(credentials=credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -5188,9 +4689,7 @@ async def test_get_batch_prediction_job_flattened_async(): ) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - response = await client.get_batch_prediction_job( - name="name_value", - ) + response = await client.get_batch_prediction_job(name="name_value",) # Establish that the underlying call was made with the expected # request object values. @@ -5202,16 +4701,13 @@ async def test_get_batch_prediction_job_flattened_async(): @pytest.mark.asyncio async def test_get_batch_prediction_job_flattened_error_async(): - client = JobServiceAsyncClient( - credentials=credentials.AnonymousCredentials(), - ) + client = JobServiceAsyncClient(credentials=credentials.AnonymousCredentials(),) # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): await client.get_batch_prediction_job( - job_service.GetBatchPredictionJobRequest(), - name="name_value", + job_service.GetBatchPredictionJobRequest(), name="name_value", ) @@ -5219,8 +4715,7 @@ def test_list_batch_prediction_jobs( transport: str = "grpc", request_type=job_service.ListBatchPredictionJobsRequest ): client = JobServiceClient( - credentials=credentials.AnonymousCredentials(), - transport=transport, + credentials=credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -5261,8 +4756,7 @@ async def test_list_batch_prediction_jobs_async( request_type=job_service.ListBatchPredictionJobsRequest, ): client = JobServiceAsyncClient( - credentials=credentials.AnonymousCredentials(), - transport=transport, + credentials=credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -5300,9 +4794,7 @@ async def test_list_batch_prediction_jobs_async_from_dict(): def test_list_batch_prediction_jobs_field_headers(): - client = JobServiceClient( - credentials=credentials.AnonymousCredentials(), - ) + client = JobServiceClient(credentials=credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. @@ -5324,17 +4816,12 @@ def test_list_batch_prediction_jobs_field_headers(): # Establish that the field header was sent. 
_, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "parent=parent/value", - ) in kw["metadata"] + assert ("x-goog-request-params", "parent=parent/value",) in kw["metadata"] @pytest.mark.asyncio async def test_list_batch_prediction_jobs_field_headers_async(): - client = JobServiceAsyncClient( - credentials=credentials.AnonymousCredentials(), - ) + client = JobServiceAsyncClient(credentials=credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. @@ -5358,16 +4845,11 @@ async def test_list_batch_prediction_jobs_field_headers_async(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "parent=parent/value", - ) in kw["metadata"] + assert ("x-goog-request-params", "parent=parent/value",) in kw["metadata"] def test_list_batch_prediction_jobs_flattened(): - client = JobServiceClient( - credentials=credentials.AnonymousCredentials(), - ) + client = JobServiceClient(credentials=credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -5378,9 +4860,7 @@ def test_list_batch_prediction_jobs_flattened(): # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - client.list_batch_prediction_jobs( - parent="parent_value", - ) + client.list_batch_prediction_jobs(parent="parent_value",) # Establish that the underlying call was made with the expected # request object values. @@ -5391,24 +4871,19 @@ def test_list_batch_prediction_jobs_flattened(): def test_list_batch_prediction_jobs_flattened_error(): - client = JobServiceClient( - credentials=credentials.AnonymousCredentials(), - ) + client = JobServiceClient(credentials=credentials.AnonymousCredentials(),) # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): client.list_batch_prediction_jobs( - job_service.ListBatchPredictionJobsRequest(), - parent="parent_value", + job_service.ListBatchPredictionJobsRequest(), parent="parent_value", ) @pytest.mark.asyncio async def test_list_batch_prediction_jobs_flattened_async(): - client = JobServiceAsyncClient( - credentials=credentials.AnonymousCredentials(), - ) + client = JobServiceAsyncClient(credentials=credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -5422,9 +4897,7 @@ async def test_list_batch_prediction_jobs_flattened_async(): ) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - response = await client.list_batch_prediction_jobs( - parent="parent_value", - ) + response = await client.list_batch_prediction_jobs(parent="parent_value",) # Establish that the underlying call was made with the expected # request object values. @@ -5436,23 +4909,18 @@ async def test_list_batch_prediction_jobs_flattened_async(): @pytest.mark.asyncio async def test_list_batch_prediction_jobs_flattened_error_async(): - client = JobServiceAsyncClient( - credentials=credentials.AnonymousCredentials(), - ) + client = JobServiceAsyncClient(credentials=credentials.AnonymousCredentials(),) # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): await client.list_batch_prediction_jobs( - job_service.ListBatchPredictionJobsRequest(), - parent="parent_value", + job_service.ListBatchPredictionJobsRequest(), parent="parent_value", ) def test_list_batch_prediction_jobs_pager(): - client = JobServiceClient( - credentials=credentials.AnonymousCredentials, - ) + client = JobServiceClient(credentials=credentials.AnonymousCredentials,) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -5469,13 +4937,10 @@ def test_list_batch_prediction_jobs_pager(): next_page_token="abc", ), job_service.ListBatchPredictionJobsResponse( - batch_prediction_jobs=[], - next_page_token="def", + batch_prediction_jobs=[], next_page_token="def", ), job_service.ListBatchPredictionJobsResponse( - batch_prediction_jobs=[ - batch_prediction_job.BatchPredictionJob(), - ], + batch_prediction_jobs=[batch_prediction_job.BatchPredictionJob(),], next_page_token="ghi", ), job_service.ListBatchPredictionJobsResponse( @@ -5503,9 +4968,7 @@ def test_list_batch_prediction_jobs_pager(): def test_list_batch_prediction_jobs_pages(): - client = JobServiceClient( - credentials=credentials.AnonymousCredentials, - ) + client = JobServiceClient(credentials=credentials.AnonymousCredentials,) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -5522,13 +4985,10 @@ def test_list_batch_prediction_jobs_pages(): next_page_token="abc", ), job_service.ListBatchPredictionJobsResponse( - batch_prediction_jobs=[], - next_page_token="def", + batch_prediction_jobs=[], next_page_token="def", ), job_service.ListBatchPredictionJobsResponse( - batch_prediction_jobs=[ - batch_prediction_job.BatchPredictionJob(), - ], + batch_prediction_jobs=[batch_prediction_job.BatchPredictionJob(),], next_page_token="ghi", ), job_service.ListBatchPredictionJobsResponse( @@ -5546,9 +5006,7 @@ def test_list_batch_prediction_jobs_pages(): @pytest.mark.asyncio async def test_list_batch_prediction_jobs_async_pager(): - client = JobServiceAsyncClient( - credentials=credentials.AnonymousCredentials, - ) + client = JobServiceAsyncClient(credentials=credentials.AnonymousCredentials,) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -5567,13 +5025,10 @@ async def test_list_batch_prediction_jobs_async_pager(): next_page_token="abc", ), job_service.ListBatchPredictionJobsResponse( - batch_prediction_jobs=[], - next_page_token="def", + batch_prediction_jobs=[], next_page_token="def", ), job_service.ListBatchPredictionJobsResponse( - batch_prediction_jobs=[ - batch_prediction_job.BatchPredictionJob(), - ], + batch_prediction_jobs=[batch_prediction_job.BatchPredictionJob(),], next_page_token="ghi", ), job_service.ListBatchPredictionJobsResponse( @@ -5584,9 +5039,7 @@ async def test_list_batch_prediction_jobs_async_pager(): ), RuntimeError, ) - async_pager = await client.list_batch_prediction_jobs( - request={}, - ) + async_pager = await client.list_batch_prediction_jobs(request={},) assert async_pager.next_page_token == "abc" responses = [] async for response in async_pager: @@ -5600,9 +5053,7 @@ async def test_list_batch_prediction_jobs_async_pager(): @pytest.mark.asyncio async def test_list_batch_prediction_jobs_async_pages(): - client = JobServiceAsyncClient( - credentials=credentials.AnonymousCredentials, - ) + client = JobServiceAsyncClient(credentials=credentials.AnonymousCredentials,) # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object( @@ -5621,13 +5072,10 @@ async def test_list_batch_prediction_jobs_async_pages(): next_page_token="abc", ), job_service.ListBatchPredictionJobsResponse( - batch_prediction_jobs=[], - next_page_token="def", + batch_prediction_jobs=[], next_page_token="def", ), job_service.ListBatchPredictionJobsResponse( - batch_prediction_jobs=[ - batch_prediction_job.BatchPredictionJob(), - ], + batch_prediction_jobs=[batch_prediction_job.BatchPredictionJob(),], next_page_token="ghi", ), job_service.ListBatchPredictionJobsResponse( @@ -5649,8 +5097,7 @@ def test_delete_batch_prediction_job( transport: str = "grpc", request_type=job_service.DeleteBatchPredictionJobRequest ): client = JobServiceClient( - credentials=credentials.AnonymousCredentials(), - transport=transport, + credentials=credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -5686,8 +5133,7 @@ async def test_delete_batch_prediction_job_async( request_type=job_service.DeleteBatchPredictionJobRequest, ): client = JobServiceAsyncClient( - credentials=credentials.AnonymousCredentials(), - transport=transport, + credentials=credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -5721,9 +5167,7 @@ async def test_delete_batch_prediction_job_async_from_dict(): def test_delete_batch_prediction_job_field_headers(): - client = JobServiceClient( - credentials=credentials.AnonymousCredentials(), - ) + client = JobServiceClient(credentials=credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. @@ -5745,17 +5189,12 @@ def test_delete_batch_prediction_job_field_headers(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "name=name/value", - ) in kw["metadata"] + assert ("x-goog-request-params", "name=name/value",) in kw["metadata"] @pytest.mark.asyncio async def test_delete_batch_prediction_job_field_headers_async(): - client = JobServiceAsyncClient( - credentials=credentials.AnonymousCredentials(), - ) + client = JobServiceAsyncClient(credentials=credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. @@ -5779,16 +5218,11 @@ async def test_delete_batch_prediction_job_field_headers_async(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "name=name/value", - ) in kw["metadata"] + assert ("x-goog-request-params", "name=name/value",) in kw["metadata"] def test_delete_batch_prediction_job_flattened(): - client = JobServiceClient( - credentials=credentials.AnonymousCredentials(), - ) + client = JobServiceClient(credentials=credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -5799,9 +5233,7 @@ def test_delete_batch_prediction_job_flattened(): # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - client.delete_batch_prediction_job( - name="name_value", - ) + client.delete_batch_prediction_job(name="name_value",) # Establish that the underlying call was made with the expected # request object values. 
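
For context on the *_pager / *_pages hunks above: the tests fake a four-page result by giving the gRPC stub a side_effect sequence whose final response carries an empty next_page_token, and the pager keeps issuing requests until that token runs out. A sketch of the client-side usage the pager enables (hypothetical project and location; assumes application-default credentials rather than the tests' anonymous ones):

    from google.cloud.aiplatform_v1beta1.services.job_service import JobServiceClient

    client = JobServiceClient()
    pager = client.list_batch_prediction_jobs(
        parent="projects/my-project/locations/us-central1"
    )
    # Item iteration follows next_page_token across pages transparently...
    for job in pager:
        print(job.name)
    # ...or go page-by-page via .pages (a fresh call is shown because the
    # underlying iterator is consumed once):
    for page in client.list_batch_prediction_jobs(
        parent="projects/my-project/locations/us-central1"
    ).pages:
        print(len(page.batch_prediction_jobs))
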
@@ -5812,24 +5244,19 @@ def test_delete_batch_prediction_job_flattened(): def test_delete_batch_prediction_job_flattened_error(): - client = JobServiceClient( - credentials=credentials.AnonymousCredentials(), - ) + client = JobServiceClient(credentials=credentials.AnonymousCredentials(),) # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): client.delete_batch_prediction_job( - job_service.DeleteBatchPredictionJobRequest(), - name="name_value", + job_service.DeleteBatchPredictionJobRequest(), name="name_value", ) @pytest.mark.asyncio async def test_delete_batch_prediction_job_flattened_async(): - client = JobServiceAsyncClient( - credentials=credentials.AnonymousCredentials(), - ) + client = JobServiceAsyncClient(credentials=credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -5843,9 +5270,7 @@ async def test_delete_batch_prediction_job_flattened_async(): ) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - response = await client.delete_batch_prediction_job( - name="name_value", - ) + response = await client.delete_batch_prediction_job(name="name_value",) # Establish that the underlying call was made with the expected # request object values. @@ -5857,16 +5282,13 @@ async def test_delete_batch_prediction_job_flattened_async(): @pytest.mark.asyncio async def test_delete_batch_prediction_job_flattened_error_async(): - client = JobServiceAsyncClient( - credentials=credentials.AnonymousCredentials(), - ) + client = JobServiceAsyncClient(credentials=credentials.AnonymousCredentials(),) # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): await client.delete_batch_prediction_job( - job_service.DeleteBatchPredictionJobRequest(), - name="name_value", + job_service.DeleteBatchPredictionJobRequest(), name="name_value", ) @@ -5874,8 +5296,7 @@ def test_cancel_batch_prediction_job( transport: str = "grpc", request_type=job_service.CancelBatchPredictionJobRequest ): client = JobServiceClient( - credentials=credentials.AnonymousCredentials(), - transport=transport, + credentials=credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -5911,8 +5332,7 @@ async def test_cancel_batch_prediction_job_async( request_type=job_service.CancelBatchPredictionJobRequest, ): client = JobServiceAsyncClient( - credentials=credentials.AnonymousCredentials(), - transport=transport, + credentials=credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -5944,9 +5364,7 @@ async def test_cancel_batch_prediction_job_async_from_dict(): def test_cancel_batch_prediction_job_field_headers(): - client = JobServiceClient( - credentials=credentials.AnonymousCredentials(), - ) + client = JobServiceClient(credentials=credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. @@ -5968,17 +5386,12 @@ def test_cancel_batch_prediction_job_field_headers(): # Establish that the field header was sent. 
_, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "name=name/value", - ) in kw["metadata"] + assert ("x-goog-request-params", "name=name/value",) in kw["metadata"] @pytest.mark.asyncio async def test_cancel_batch_prediction_job_field_headers_async(): - client = JobServiceAsyncClient( - credentials=credentials.AnonymousCredentials(), - ) + client = JobServiceAsyncClient(credentials=credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. @@ -6000,16 +5413,11 @@ async def test_cancel_batch_prediction_job_field_headers_async(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "name=name/value", - ) in kw["metadata"] + assert ("x-goog-request-params", "name=name/value",) in kw["metadata"] def test_cancel_batch_prediction_job_flattened(): - client = JobServiceClient( - credentials=credentials.AnonymousCredentials(), - ) + client = JobServiceClient(credentials=credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -6020,9 +5428,7 @@ def test_cancel_batch_prediction_job_flattened(): # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - client.cancel_batch_prediction_job( - name="name_value", - ) + client.cancel_batch_prediction_job(name="name_value",) # Establish that the underlying call was made with the expected # request object values. @@ -6033,24 +5439,19 @@ def test_cancel_batch_prediction_job_flattened(): def test_cancel_batch_prediction_job_flattened_error(): - client = JobServiceClient( - credentials=credentials.AnonymousCredentials(), - ) + client = JobServiceClient(credentials=credentials.AnonymousCredentials(),) # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): client.cancel_batch_prediction_job( - job_service.CancelBatchPredictionJobRequest(), - name="name_value", + job_service.CancelBatchPredictionJobRequest(), name="name_value", ) @pytest.mark.asyncio async def test_cancel_batch_prediction_job_flattened_async(): - client = JobServiceAsyncClient( - credentials=credentials.AnonymousCredentials(), - ) + client = JobServiceAsyncClient(credentials=credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -6062,9 +5463,7 @@ async def test_cancel_batch_prediction_job_flattened_async(): call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - response = await client.cancel_batch_prediction_job( - name="name_value", - ) + response = await client.cancel_batch_prediction_job(name="name_value",) # Establish that the underlying call was made with the expected # request object values. @@ -6076,16 +5475,13 @@ async def test_cancel_batch_prediction_job_flattened_async(): @pytest.mark.asyncio async def test_cancel_batch_prediction_job_flattened_error_async(): - client = JobServiceAsyncClient( - credentials=credentials.AnonymousCredentials(), - ) + client = JobServiceAsyncClient(credentials=credentials.AnonymousCredentials(),) # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): await client.cancel_batch_prediction_job( - job_service.CancelBatchPredictionJobRequest(), - name="name_value", + job_service.CancelBatchPredictionJobRequest(), name="name_value", ) @@ -6096,8 +5492,7 @@ def test_credentials_transport_error(): ) with pytest.raises(ValueError): client = JobServiceClient( - credentials=credentials.AnonymousCredentials(), - transport=transport, + credentials=credentials.AnonymousCredentials(), transport=transport, ) # It is an error to provide a credentials file and a transport instance. @@ -6116,8 +5511,7 @@ def test_credentials_transport_error(): ) with pytest.raises(ValueError): client = JobServiceClient( - client_options={"scopes": ["1", "2"]}, - transport=transport, + client_options={"scopes": ["1", "2"]}, transport=transport, ) @@ -6159,13 +5553,8 @@ def test_transport_adc(transport_class): def test_transport_grpc_default(): # A client should use the gRPC transport by default. - client = JobServiceClient( - credentials=credentials.AnonymousCredentials(), - ) - assert isinstance( - client.transport, - transports.JobServiceGrpcTransport, - ) + client = JobServiceClient(credentials=credentials.AnonymousCredentials(),) + assert isinstance(client.transport, transports.JobServiceGrpcTransport,) def test_job_service_base_transport_error(): @@ -6231,8 +5620,7 @@ def test_job_service_base_transport_with_credentials_file(): Transport.return_value = None load_creds.return_value = (credentials.AnonymousCredentials(), None) transport = transports.JobServiceTransport( - credentials_file="credentials.json", - quota_project_id="octopus", + credentials_file="credentials.json", quota_project_id="octopus", ) load_creds.assert_called_once_with( "credentials.json", @@ -6302,8 +5690,7 @@ def test_job_service_grpc_transport_channel(): # Check that channel is used if provided. transport = transports.JobServiceGrpcTransport( - host="squid.clam.whelk", - channel=channel, + host="squid.clam.whelk", channel=channel, ) assert transport.grpc_channel == channel assert transport._host == "squid.clam.whelk:443" @@ -6315,8 +5702,7 @@ def test_job_service_grpc_asyncio_transport_channel(): # Check that channel is used if provided. transport = transports.JobServiceGrpcAsyncIOTransport( - host="squid.clam.whelk", - channel=channel, + host="squid.clam.whelk", channel=channel, ) assert transport.grpc_channel == channel assert transport._host == "squid.clam.whelk:443" @@ -6405,16 +5791,12 @@ def test_job_service_transport_channel_mtls_with_adc(transport_class): def test_job_service_grpc_lro_client(): client = JobServiceClient( - credentials=credentials.AnonymousCredentials(), - transport="grpc", + credentials=credentials.AnonymousCredentials(), transport="grpc", ) transport = client.transport # Ensure that we have a api-core operations client. - assert isinstance( - transport.operations_client, - operations_v1.OperationsClient, - ) + assert isinstance(transport.operations_client, operations_v1.OperationsClient,) # Ensure that subsequent calls to the property send the exact same object. assert transport.operations_client is transport.operations_client @@ -6422,16 +5804,12 @@ def test_job_service_grpc_lro_client(): def test_job_service_grpc_lro_async_client(): client = JobServiceAsyncClient( - credentials=credentials.AnonymousCredentials(), - transport="grpc_asyncio", + credentials=credentials.AnonymousCredentials(), transport="grpc_asyncio", ) transport = client.transport # Ensure that we have a api-core operations client. 
- assert isinstance( - transport.operations_client, - operations_v1.OperationsAsyncClient, - ) + assert isinstance(transport.operations_client, operations_v1.OperationsAsyncClient,) # Ensure that subsequent calls to the property send the exact same object. assert transport.operations_client is transport.operations_client @@ -6443,9 +5821,7 @@ def test_batch_prediction_job_path(): batch_prediction_job = "whelk" expected = "projects/{project}/locations/{location}/batchPredictionJobs/{batch_prediction_job}".format( - project=project, - location=location, - batch_prediction_job=batch_prediction_job, + project=project, location=location, batch_prediction_job=batch_prediction_job, ) actual = JobServiceClient.batch_prediction_job_path( project, location, batch_prediction_job @@ -6472,9 +5848,7 @@ def test_custom_job_path(): custom_job = "winkle" expected = "projects/{project}/locations/{location}/customJobs/{custom_job}".format( - project=project, - location=location, - custom_job=custom_job, + project=project, location=location, custom_job=custom_job, ) actual = JobServiceClient.custom_job_path(project, location, custom_job) assert expected == actual @@ -6499,9 +5873,7 @@ def test_data_labeling_job_path(): data_labeling_job = "whelk" expected = "projects/{project}/locations/{location}/dataLabelingJobs/{data_labeling_job}".format( - project=project, - location=location, - data_labeling_job=data_labeling_job, + project=project, location=location, data_labeling_job=data_labeling_job, ) actual = JobServiceClient.data_labeling_job_path( project, location, data_labeling_job @@ -6528,9 +5900,7 @@ def test_dataset_path(): dataset = "winkle" expected = "projects/{project}/locations/{location}/datasets/{dataset}".format( - project=project, - location=location, - dataset=dataset, + project=project, location=location, dataset=dataset, ) actual = JobServiceClient.dataset_path(project, location, dataset) assert expected == actual @@ -6584,9 +5954,7 @@ def test_model_path(): model = "winkle" expected = "projects/{project}/locations/{location}/models/{model}".format( - project=project, - location=location, - model=model, + project=project, location=location, model=model, ) actual = JobServiceClient.model_path(project, location, model) assert expected == actual @@ -6629,9 +5997,7 @@ def test_parse_common_billing_account_path(): def test_common_folder_path(): folder = "whelk" - expected = "folders/{folder}".format( - folder=folder, - ) + expected = "folders/{folder}".format(folder=folder,) actual = JobServiceClient.common_folder_path(folder) assert expected == actual @@ -6650,9 +6016,7 @@ def test_parse_common_folder_path(): def test_common_organization_path(): organization = "oyster" - expected = "organizations/{organization}".format( - organization=organization, - ) + expected = "organizations/{organization}".format(organization=organization,) actual = JobServiceClient.common_organization_path(organization) assert expected == actual @@ -6671,9 +6035,7 @@ def test_parse_common_organization_path(): def test_common_project_path(): project = "cuttlefish" - expected = "projects/{project}".format( - project=project, - ) + expected = "projects/{project}".format(project=project,) actual = JobServiceClient.common_project_path(project) assert expected == actual @@ -6694,8 +6056,7 @@ def test_common_location_path(): location = "nautilus" expected = "projects/{project}/locations/{location}".format( - project=project, - location=location, + project=project, location=location, ) actual = 
JobServiceClient.common_location_path(project, location) assert expected == actual @@ -6720,8 +6081,7 @@ def test_client_withDEFAULT_CLIENT_INFO(): transports.JobServiceTransport, "_prep_wrapped_messages" ) as prep: client = JobServiceClient( - credentials=credentials.AnonymousCredentials(), - client_info=client_info, + credentials=credentials.AnonymousCredentials(), client_info=client_info, ) prep.assert_called_once_with(client_info) @@ -6730,7 +6090,6 @@ def test_client_withDEFAULT_CLIENT_INFO(): ) as prep: transport_class = JobServiceClient.get_transport_class() transport = transport_class( - credentials=credentials.AnonymousCredentials(), - client_info=client_info, + credentials=credentials.AnonymousCredentials(), client_info=client_info, ) prep.assert_called_once_with(client_info) diff --git a/tests/unit/gapic/aiplatform_v1beta1/test_migration_service.py b/tests/unit/gapic/aiplatform_v1beta1/test_migration_service.py index 23f17a54b6..85e6a2d362 100644 --- a/tests/unit/gapic/aiplatform_v1beta1/test_migration_service.py +++ b/tests/unit/gapic/aiplatform_v1beta1/test_migration_service.py @@ -393,9 +393,7 @@ def test_migration_service_client_client_options_scopes( client_class, transport_class, transport_name ): # Check the case scopes are provided. - options = client_options.ClientOptions( - scopes=["1", "2"], - ) + options = client_options.ClientOptions(scopes=["1", "2"],) with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None client = client_class(client_options=options) @@ -464,8 +462,7 @@ def test_search_migratable_resources( request_type=migration_service.SearchMigratableResourcesRequest, ): client = MigrationServiceClient( - credentials=credentials.AnonymousCredentials(), - transport=transport, + credentials=credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -506,8 +503,7 @@ async def test_search_migratable_resources_async( request_type=migration_service.SearchMigratableResourcesRequest, ): client = MigrationServiceAsyncClient( - credentials=credentials.AnonymousCredentials(), - transport=transport, + credentials=credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -545,9 +541,7 @@ async def test_search_migratable_resources_async_from_dict(): def test_search_migratable_resources_field_headers(): - client = MigrationServiceClient( - credentials=credentials.AnonymousCredentials(), - ) + client = MigrationServiceClient(credentials=credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. @@ -569,10 +563,7 @@ def test_search_migratable_resources_field_headers(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "parent=parent/value", - ) in kw["metadata"] + assert ("x-goog-request-params", "parent=parent/value",) in kw["metadata"] @pytest.mark.asyncio @@ -603,16 +594,11 @@ async def test_search_migratable_resources_field_headers_async(): # Establish that the field header was sent. 
_, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "parent=parent/value", - ) in kw["metadata"] + assert ("x-goog-request-params", "parent=parent/value",) in kw["metadata"] def test_search_migratable_resources_flattened(): - client = MigrationServiceClient( - credentials=credentials.AnonymousCredentials(), - ) + client = MigrationServiceClient(credentials=credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -623,9 +609,7 @@ def test_search_migratable_resources_flattened(): # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - client.search_migratable_resources( - parent="parent_value", - ) + client.search_migratable_resources(parent="parent_value",) # Establish that the underlying call was made with the expected # request object values. @@ -636,16 +620,13 @@ def test_search_migratable_resources_flattened(): def test_search_migratable_resources_flattened_error(): - client = MigrationServiceClient( - credentials=credentials.AnonymousCredentials(), - ) + client = MigrationServiceClient(credentials=credentials.AnonymousCredentials(),) # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): client.search_migratable_resources( - migration_service.SearchMigratableResourcesRequest(), - parent="parent_value", + migration_service.SearchMigratableResourcesRequest(), parent="parent_value", ) @@ -667,9 +648,7 @@ async def test_search_migratable_resources_flattened_async(): ) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - response = await client.search_migratable_resources( - parent="parent_value", - ) + response = await client.search_migratable_resources(parent="parent_value",) # Establish that the underlying call was made with the expected # request object values. @@ -689,15 +668,12 @@ async def test_search_migratable_resources_flattened_error_async(): # fields is an error. with pytest.raises(ValueError): await client.search_migratable_resources( - migration_service.SearchMigratableResourcesRequest(), - parent="parent_value", + migration_service.SearchMigratableResourcesRequest(), parent="parent_value", ) def test_search_migratable_resources_pager(): - client = MigrationServiceClient( - credentials=credentials.AnonymousCredentials, - ) + client = MigrationServiceClient(credentials=credentials.AnonymousCredentials,) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -714,13 +690,10 @@ def test_search_migratable_resources_pager(): next_page_token="abc", ), migration_service.SearchMigratableResourcesResponse( - migratable_resources=[], - next_page_token="def", + migratable_resources=[], next_page_token="def", ), migration_service.SearchMigratableResourcesResponse( - migratable_resources=[ - migratable_resource.MigratableResource(), - ], + migratable_resources=[migratable_resource.MigratableResource(),], next_page_token="ghi", ), migration_service.SearchMigratableResourcesResponse( @@ -748,9 +721,7 @@ def test_search_migratable_resources_pager(): def test_search_migratable_resources_pages(): - client = MigrationServiceClient( - credentials=credentials.AnonymousCredentials, - ) + client = MigrationServiceClient(credentials=credentials.AnonymousCredentials,) # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object( @@ -767,13 +738,10 @@ def test_search_migratable_resources_pages(): next_page_token="abc", ), migration_service.SearchMigratableResourcesResponse( - migratable_resources=[], - next_page_token="def", + migratable_resources=[], next_page_token="def", ), migration_service.SearchMigratableResourcesResponse( - migratable_resources=[ - migratable_resource.MigratableResource(), - ], + migratable_resources=[migratable_resource.MigratableResource(),], next_page_token="ghi", ), migration_service.SearchMigratableResourcesResponse( @@ -791,9 +759,7 @@ def test_search_migratable_resources_pages(): @pytest.mark.asyncio async def test_search_migratable_resources_async_pager(): - client = MigrationServiceAsyncClient( - credentials=credentials.AnonymousCredentials, - ) + client = MigrationServiceAsyncClient(credentials=credentials.AnonymousCredentials,) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -812,13 +778,10 @@ async def test_search_migratable_resources_async_pager(): next_page_token="abc", ), migration_service.SearchMigratableResourcesResponse( - migratable_resources=[], - next_page_token="def", + migratable_resources=[], next_page_token="def", ), migration_service.SearchMigratableResourcesResponse( - migratable_resources=[ - migratable_resource.MigratableResource(), - ], + migratable_resources=[migratable_resource.MigratableResource(),], next_page_token="ghi", ), migration_service.SearchMigratableResourcesResponse( @@ -829,9 +792,7 @@ async def test_search_migratable_resources_async_pager(): ), RuntimeError, ) - async_pager = await client.search_migratable_resources( - request={}, - ) + async_pager = await client.search_migratable_resources(request={},) assert async_pager.next_page_token == "abc" responses = [] async for response in async_pager: @@ -845,9 +806,7 @@ async def test_search_migratable_resources_async_pager(): @pytest.mark.asyncio async def test_search_migratable_resources_async_pages(): - client = MigrationServiceAsyncClient( - credentials=credentials.AnonymousCredentials, - ) + client = MigrationServiceAsyncClient(credentials=credentials.AnonymousCredentials,) # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object( @@ -866,13 +825,10 @@ async def test_search_migratable_resources_async_pages(): next_page_token="abc", ), migration_service.SearchMigratableResourcesResponse( - migratable_resources=[], - next_page_token="def", + migratable_resources=[], next_page_token="def", ), migration_service.SearchMigratableResourcesResponse( - migratable_resources=[ - migratable_resource.MigratableResource(), - ], + migratable_resources=[migratable_resource.MigratableResource(),], next_page_token="ghi", ), migration_service.SearchMigratableResourcesResponse( @@ -894,8 +850,7 @@ def test_batch_migrate_resources( transport: str = "grpc", request_type=migration_service.BatchMigrateResourcesRequest ): client = MigrationServiceClient( - credentials=credentials.AnonymousCredentials(), - transport=transport, + credentials=credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -931,8 +886,7 @@ async def test_batch_migrate_resources_async( request_type=migration_service.BatchMigrateResourcesRequest, ): client = MigrationServiceAsyncClient( - credentials=credentials.AnonymousCredentials(), - transport=transport, + credentials=credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -966,9 +920,7 @@ async def test_batch_migrate_resources_async_from_dict(): def test_batch_migrate_resources_field_headers(): - client = MigrationServiceClient( - credentials=credentials.AnonymousCredentials(), - ) + client = MigrationServiceClient(credentials=credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. @@ -990,10 +942,7 @@ def test_batch_migrate_resources_field_headers(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "parent=parent/value", - ) in kw["metadata"] + assert ("x-goog-request-params", "parent=parent/value",) in kw["metadata"] @pytest.mark.asyncio @@ -1024,16 +973,11 @@ async def test_batch_migrate_resources_field_headers_async(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "parent=parent/value", - ) in kw["metadata"] + assert ("x-goog-request-params", "parent=parent/value",) in kw["metadata"] def test_batch_migrate_resources_flattened(): - client = MigrationServiceClient( - credentials=credentials.AnonymousCredentials(), - ) + client = MigrationServiceClient(credentials=credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -1072,9 +1016,7 @@ def test_batch_migrate_resources_flattened(): def test_batch_migrate_resources_flattened_error(): - client = MigrationServiceClient( - credentials=credentials.AnonymousCredentials(), - ) + client = MigrationServiceClient(credentials=credentials.AnonymousCredentials(),) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -1166,8 +1108,7 @@ def test_credentials_transport_error(): ) with pytest.raises(ValueError): client = MigrationServiceClient( - credentials=credentials.AnonymousCredentials(), - transport=transport, + credentials=credentials.AnonymousCredentials(), transport=transport, ) # It is an error to provide a credentials file and a transport instance. 
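
test_credentials_transport_error, reformatted above, is worth a gloss: a Transport instance is constructed with its own credentials, so also passing credentials (or a credentials file, or scopes) to the client would be ambiguous, and the constructor rejects the combination outright. A sketch mirroring the test (import paths assumed from this package's layout):

    from google.auth import credentials
    from google.cloud.aiplatform_v1beta1.services.migration_service import (
        MigrationServiceClient,
        transports,
    )

    transport = transports.MigrationServiceGrpcTransport(
        credentials=credentials.AnonymousCredentials()
    )
    try:
        MigrationServiceClient(
            credentials=credentials.AnonymousCredentials(), transport=transport
        )
    except ValueError:
        pass  # the client refuses credentials alongside a ready-made transport
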
@@ -1186,8 +1127,7 @@ def test_credentials_transport_error(): ) with pytest.raises(ValueError): client = MigrationServiceClient( - client_options={"scopes": ["1", "2"]}, - transport=transport, + client_options={"scopes": ["1", "2"]}, transport=transport, ) @@ -1232,13 +1172,8 @@ def test_transport_adc(transport_class): def test_transport_grpc_default(): # A client should use the gRPC transport by default. - client = MigrationServiceClient( - credentials=credentials.AnonymousCredentials(), - ) - assert isinstance( - client.transport, - transports.MigrationServiceGrpcTransport, - ) + client = MigrationServiceClient(credentials=credentials.AnonymousCredentials(),) + assert isinstance(client.transport, transports.MigrationServiceGrpcTransport,) def test_migration_service_base_transport_error(): @@ -1286,8 +1221,7 @@ def test_migration_service_base_transport_with_credentials_file(): Transport.return_value = None load_creds.return_value = (credentials.AnonymousCredentials(), None) transport = transports.MigrationServiceTransport( - credentials_file="credentials.json", - quota_project_id="octopus", + credentials_file="credentials.json", quota_project_id="octopus", ) load_creds.assert_called_once_with( "credentials.json", @@ -1357,8 +1291,7 @@ def test_migration_service_grpc_transport_channel(): # Check that channel is used if provided. transport = transports.MigrationServiceGrpcTransport( - host="squid.clam.whelk", - channel=channel, + host="squid.clam.whelk", channel=channel, ) assert transport.grpc_channel == channel assert transport._host == "squid.clam.whelk:443" @@ -1370,8 +1303,7 @@ def test_migration_service_grpc_asyncio_transport_channel(): # Check that channel is used if provided. transport = transports.MigrationServiceGrpcAsyncIOTransport( - host="squid.clam.whelk", - channel=channel, + host="squid.clam.whelk", channel=channel, ) assert transport.grpc_channel == channel assert transport._host == "squid.clam.whelk:443" @@ -1468,16 +1400,12 @@ def test_migration_service_transport_channel_mtls_with_adc(transport_class): def test_migration_service_grpc_lro_client(): client = MigrationServiceClient( - credentials=credentials.AnonymousCredentials(), - transport="grpc", + credentials=credentials.AnonymousCredentials(), transport="grpc", ) transport = client.transport # Ensure that we have a api-core operations client. - assert isinstance( - transport.operations_client, - operations_v1.OperationsClient, - ) + assert isinstance(transport.operations_client, operations_v1.OperationsClient,) # Ensure that subsequent calls to the property send the exact same object. assert transport.operations_client is transport.operations_client @@ -1485,16 +1413,12 @@ def test_migration_service_grpc_lro_client(): def test_migration_service_grpc_lro_async_client(): client = MigrationServiceAsyncClient( - credentials=credentials.AnonymousCredentials(), - transport="grpc_asyncio", + credentials=credentials.AnonymousCredentials(), transport="grpc_asyncio", ) transport = client.transport # Ensure that we have a api-core operations client. - assert isinstance( - transport.operations_client, - operations_v1.OperationsAsyncClient, - ) + assert isinstance(transport.operations_client, operations_v1.OperationsAsyncClient,) # Ensure that subsequent calls to the property send the exact same object. 
assert transport.operations_client is transport.operations_client @@ -1506,9 +1430,7 @@ def test_annotated_dataset_path(): annotated_dataset = "whelk" expected = "projects/{project}/datasets/{dataset}/annotatedDatasets/{annotated_dataset}".format( - project=project, - dataset=dataset, - annotated_dataset=annotated_dataset, + project=project, dataset=dataset, annotated_dataset=annotated_dataset, ) actual = MigrationServiceClient.annotated_dataset_path( project, dataset, annotated_dataset @@ -1535,9 +1457,7 @@ def test_dataset_path(): dataset = "winkle" expected = "projects/{project}/locations/{location}/datasets/{dataset}".format( - project=project, - location=location, - dataset=dataset, + project=project, location=location, dataset=dataset, ) actual = MigrationServiceClient.dataset_path(project, location, dataset) assert expected == actual @@ -1561,8 +1481,7 @@ def test_dataset_path(): dataset = "clam" expected = "projects/{project}/datasets/{dataset}".format( - project=project, - dataset=dataset, + project=project, dataset=dataset, ) actual = MigrationServiceClient.dataset_path(project, dataset) assert expected == actual @@ -1586,9 +1505,7 @@ def test_dataset_path(): dataset = "cuttlefish" expected = "projects/{project}/locations/{location}/datasets/{dataset}".format( - project=project, - location=location, - dataset=dataset, + project=project, location=location, dataset=dataset, ) actual = MigrationServiceClient.dataset_path(project, location, dataset) assert expected == actual @@ -1613,9 +1530,7 @@ def test_model_path(): model = "squid" expected = "projects/{project}/locations/{location}/models/{model}".format( - project=project, - location=location, - model=model, + project=project, location=location, model=model, ) actual = MigrationServiceClient.model_path(project, location, model) assert expected == actual @@ -1640,9 +1555,7 @@ def test_model_path(): model = "cuttlefish" expected = "projects/{project}/locations/{location}/models/{model}".format( - project=project, - location=location, - model=model, + project=project, location=location, model=model, ) actual = MigrationServiceClient.model_path(project, location, model) assert expected == actual @@ -1667,9 +1580,7 @@ def test_version_path(): version = "squid" expected = "projects/{project}/models/{model}/versions/{version}".format( - project=project, - model=model, - version=version, + project=project, model=model, version=version, ) actual = MigrationServiceClient.version_path(project, model, version) assert expected == actual @@ -1712,9 +1623,7 @@ def test_parse_common_billing_account_path(): def test_common_folder_path(): folder = "cuttlefish" - expected = "folders/{folder}".format( - folder=folder, - ) + expected = "folders/{folder}".format(folder=folder,) actual = MigrationServiceClient.common_folder_path(folder) assert expected == actual @@ -1733,9 +1642,7 @@ def test_parse_common_folder_path(): def test_common_organization_path(): organization = "winkle" - expected = "organizations/{organization}".format( - organization=organization, - ) + expected = "organizations/{organization}".format(organization=organization,) actual = MigrationServiceClient.common_organization_path(organization) assert expected == actual @@ -1754,9 +1661,7 @@ def test_parse_common_organization_path(): def test_common_project_path(): project = "scallop" - expected = "projects/{project}".format( - project=project, - ) + expected = "projects/{project}".format(project=project,) actual = MigrationServiceClient.common_project_path(project) assert expected == 
actual @@ -1777,8 +1682,7 @@ def test_common_location_path(): location = "clam" expected = "projects/{project}/locations/{location}".format( - project=project, - location=location, + project=project, location=location, ) actual = MigrationServiceClient.common_location_path(project, location) assert expected == actual @@ -1803,8 +1707,7 @@ def test_client_withDEFAULT_CLIENT_INFO(): transports.MigrationServiceTransport, "_prep_wrapped_messages" ) as prep: client = MigrationServiceClient( - credentials=credentials.AnonymousCredentials(), - client_info=client_info, + credentials=credentials.AnonymousCredentials(), client_info=client_info, ) prep.assert_called_once_with(client_info) @@ -1813,7 +1716,6 @@ def test_client_withDEFAULT_CLIENT_INFO(): ) as prep: transport_class = MigrationServiceClient.get_transport_class() transport = transport_class( - credentials=credentials.AnonymousCredentials(), - client_info=client_info, + credentials=credentials.AnonymousCredentials(), client_info=client_info, ) prep.assert_called_once_with(client_info) diff --git a/tests/unit/gapic/aiplatform_v1beta1/test_model_service.py b/tests/unit/gapic/aiplatform_v1beta1/test_model_service.py index 97b32a4e78..d05698a46a 100644 --- a/tests/unit/gapic/aiplatform_v1beta1/test_model_service.py +++ b/tests/unit/gapic/aiplatform_v1beta1/test_model_service.py @@ -383,9 +383,7 @@ def test_model_service_client_client_options_scopes( client_class, transport_class, transport_name ): # Check the case scopes are provided. - options = client_options.ClientOptions( - scopes=["1", "2"], - ) + options = client_options.ClientOptions(scopes=["1", "2"],) with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None client = client_class(client_options=options) @@ -451,8 +449,7 @@ def test_upload_model( transport: str = "grpc", request_type=model_service.UploadModelRequest ): client = ModelServiceClient( - credentials=credentials.AnonymousCredentials(), - transport=transport, + credentials=credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -485,8 +482,7 @@ async def test_upload_model_async( transport: str = "grpc_asyncio", request_type=model_service.UploadModelRequest ): client = ModelServiceAsyncClient( - credentials=credentials.AnonymousCredentials(), - transport=transport, + credentials=credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -518,9 +514,7 @@ async def test_upload_model_async_from_dict(): def test_upload_model_field_headers(): - client = ModelServiceClient( - credentials=credentials.AnonymousCredentials(), - ) + client = ModelServiceClient(credentials=credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. @@ -540,17 +534,12 @@ def test_upload_model_field_headers(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "parent=parent/value", - ) in kw["metadata"] + assert ("x-goog-request-params", "parent=parent/value",) in kw["metadata"] @pytest.mark.asyncio async def test_upload_model_field_headers_async(): - client = ModelServiceAsyncClient( - credentials=credentials.AnonymousCredentials(), - ) + client = ModelServiceAsyncClient(credentials=credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. 
Set these to a non-empty value. @@ -572,16 +561,11 @@ async def test_upload_model_field_headers_async(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "parent=parent/value", - ) in kw["metadata"] + assert ("x-goog-request-params", "parent=parent/value",) in kw["metadata"] def test_upload_model_flattened(): - client = ModelServiceClient( - credentials=credentials.AnonymousCredentials(), - ) + client = ModelServiceClient(credentials=credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.upload_model), "__call__") as call: @@ -591,8 +575,7 @@ def test_upload_model_flattened(): # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. client.upload_model( - parent="parent_value", - model=gca_model.Model(name="name_value"), + parent="parent_value", model=gca_model.Model(name="name_value"), ) # Establish that the underlying call was made with the expected @@ -606,9 +589,7 @@ def test_upload_model_flattened(): def test_upload_model_flattened_error(): - client = ModelServiceClient( - credentials=credentials.AnonymousCredentials(), - ) + client = ModelServiceClient(credentials=credentials.AnonymousCredentials(),) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -622,9 +603,7 @@ def test_upload_model_flattened_error(): @pytest.mark.asyncio async def test_upload_model_flattened_async(): - client = ModelServiceAsyncClient( - credentials=credentials.AnonymousCredentials(), - ) + client = ModelServiceAsyncClient(credentials=credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.upload_model), "__call__") as call: @@ -637,8 +616,7 @@ async def test_upload_model_flattened_async(): # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. response = await client.upload_model( - parent="parent_value", - model=gca_model.Model(name="name_value"), + parent="parent_value", model=gca_model.Model(name="name_value"), ) # Establish that the underlying call was made with the expected @@ -653,9 +631,7 @@ async def test_upload_model_flattened_async(): @pytest.mark.asyncio async def test_upload_model_flattened_error_async(): - client = ModelServiceAsyncClient( - credentials=credentials.AnonymousCredentials(), - ) + client = ModelServiceAsyncClient(credentials=credentials.AnonymousCredentials(),) # Attempting to call a method with both a request object and flattened # fields is an error. 
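The *_flattened_error tests split across this boundary pin down one client-surface rule: a generated method accepts either a fully-formed request object or individual "flattened" keyword arguments, never both at once. A minimal sketch of that guard under hypothetical stand-in names (UploadModelRequest and upload_model here are illustrative, not the generated code):

    from typing import Optional

    class UploadModelRequest:
        """Illustrative stand-in for model_service.UploadModelRequest."""

        def __init__(self, parent: str = "", model: object = None):
            self.parent = parent
            self.model = model

    def upload_model(
        request: Optional[UploadModelRequest] = None,
        *,
        parent: Optional[str] = None,
        model: object = None,
    ) -> UploadModelRequest:
        # Mixing a request object with flattened fields is ambiguous
        # (which value should win?), so the client rejects the call.
        if request is not None and any(v is not None for v in (parent, model)):
            raise ValueError(
                "If the `request` argument is set, then none of "
                "the individual field arguments should be set."
            )
        request = request or UploadModelRequest()
        if parent is not None:
            request.parent = parent
        if model is not None:
            request.model = model
        return request

This is why client.upload_model(model_service.UploadModelRequest(), parent="parent_value") in the hunk above must raise ValueError, while either calling convention on its own succeeds.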
@@ -669,8 +645,7 @@ async def test_upload_model_flattened_error_async(): def test_get_model(transport: str = "grpc", request_type=model_service.GetModelRequest): client = ModelServiceClient( - credentials=credentials.AnonymousCredentials(), - transport=transport, + credentials=credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -743,8 +718,7 @@ async def test_get_model_async( transport: str = "grpc_asyncio", request_type=model_service.GetModelRequest ): client = ModelServiceAsyncClient( - credentials=credentials.AnonymousCredentials(), - transport=transport, + credentials=credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -819,9 +793,7 @@ async def test_get_model_async_from_dict(): def test_get_model_field_headers(): - client = ModelServiceClient( - credentials=credentials.AnonymousCredentials(), - ) + client = ModelServiceClient(credentials=credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. @@ -841,17 +813,12 @@ def test_get_model_field_headers(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "name=name/value", - ) in kw["metadata"] + assert ("x-goog-request-params", "name=name/value",) in kw["metadata"] @pytest.mark.asyncio async def test_get_model_field_headers_async(): - client = ModelServiceAsyncClient( - credentials=credentials.AnonymousCredentials(), - ) + client = ModelServiceAsyncClient(credentials=credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. @@ -871,16 +838,11 @@ async def test_get_model_field_headers_async(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "name=name/value", - ) in kw["metadata"] + assert ("x-goog-request-params", "name=name/value",) in kw["metadata"] def test_get_model_flattened(): - client = ModelServiceClient( - credentials=credentials.AnonymousCredentials(), - ) + client = ModelServiceClient(credentials=credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.get_model), "__call__") as call: @@ -889,9 +851,7 @@ def test_get_model_flattened(): # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - client.get_model( - name="name_value", - ) + client.get_model(name="name_value",) # Establish that the underlying call was made with the expected # request object values. @@ -902,24 +862,19 @@ def test_get_model_flattened(): def test_get_model_flattened_error(): - client = ModelServiceClient( - credentials=credentials.AnonymousCredentials(), - ) + client = ModelServiceClient(credentials=credentials.AnonymousCredentials(),) # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): client.get_model( - model_service.GetModelRequest(), - name="name_value", + model_service.GetModelRequest(), name="name_value", ) @pytest.mark.asyncio async def test_get_model_flattened_async(): - client = ModelServiceAsyncClient( - credentials=credentials.AnonymousCredentials(), - ) + client = ModelServiceAsyncClient(credentials=credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.get_model), "__call__") as call: @@ -929,9 +884,7 @@ async def test_get_model_flattened_async(): call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(model.Model()) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - response = await client.get_model( - name="name_value", - ) + response = await client.get_model(name="name_value",) # Establish that the underlying call was made with the expected # request object values. @@ -943,16 +896,13 @@ async def test_get_model_flattened_async(): @pytest.mark.asyncio async def test_get_model_flattened_error_async(): - client = ModelServiceAsyncClient( - credentials=credentials.AnonymousCredentials(), - ) + client = ModelServiceAsyncClient(credentials=credentials.AnonymousCredentials(),) # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): await client.get_model( - model_service.GetModelRequest(), - name="name_value", + model_service.GetModelRequest(), name="name_value", ) @@ -960,8 +910,7 @@ def test_list_models( transport: str = "grpc", request_type=model_service.ListModelsRequest ): client = ModelServiceClient( - credentials=credentials.AnonymousCredentials(), - transport=transport, + credentials=credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -999,8 +948,7 @@ async def test_list_models_async( transport: str = "grpc_asyncio", request_type=model_service.ListModelsRequest ): client = ModelServiceAsyncClient( - credentials=credentials.AnonymousCredentials(), - transport=transport, + credentials=credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -1011,9 +959,7 @@ async def test_list_models_async( with mock.patch.object(type(client.transport.list_models), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - model_service.ListModelsResponse( - next_page_token="next_page_token_value", - ) + model_service.ListModelsResponse(next_page_token="next_page_token_value",) ) response = await client.list_models(request) @@ -1036,9 +982,7 @@ async def test_list_models_async_from_dict(): def test_list_models_field_headers(): - client = ModelServiceClient( - credentials=credentials.AnonymousCredentials(), - ) + client = ModelServiceClient(credentials=credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. @@ -1058,17 +1002,12 @@ def test_list_models_field_headers(): # Establish that the field header was sent. 
_, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "parent=parent/value", - ) in kw["metadata"] + assert ("x-goog-request-params", "parent=parent/value",) in kw["metadata"] @pytest.mark.asyncio async def test_list_models_field_headers_async(): - client = ModelServiceAsyncClient( - credentials=credentials.AnonymousCredentials(), - ) + client = ModelServiceAsyncClient(credentials=credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. @@ -1090,16 +1029,11 @@ async def test_list_models_field_headers_async(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "parent=parent/value", - ) in kw["metadata"] + assert ("x-goog-request-params", "parent=parent/value",) in kw["metadata"] def test_list_models_flattened(): - client = ModelServiceClient( - credentials=credentials.AnonymousCredentials(), - ) + client = ModelServiceClient(credentials=credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.list_models), "__call__") as call: @@ -1108,9 +1042,7 @@ def test_list_models_flattened(): # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - client.list_models( - parent="parent_value", - ) + client.list_models(parent="parent_value",) # Establish that the underlying call was made with the expected # request object values. @@ -1121,24 +1053,19 @@ def test_list_models_flattened(): def test_list_models_flattened_error(): - client = ModelServiceClient( - credentials=credentials.AnonymousCredentials(), - ) + client = ModelServiceClient(credentials=credentials.AnonymousCredentials(),) # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): client.list_models( - model_service.ListModelsRequest(), - parent="parent_value", + model_service.ListModelsRequest(), parent="parent_value", ) @pytest.mark.asyncio async def test_list_models_flattened_async(): - client = ModelServiceAsyncClient( - credentials=credentials.AnonymousCredentials(), - ) + client = ModelServiceAsyncClient(credentials=credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.list_models), "__call__") as call: @@ -1150,9 +1077,7 @@ async def test_list_models_flattened_async(): ) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - response = await client.list_models( - parent="parent_value", - ) + response = await client.list_models(parent="parent_value",) # Establish that the underlying call was made with the expected # request object values. @@ -1164,52 +1089,32 @@ async def test_list_models_flattened_async(): @pytest.mark.asyncio async def test_list_models_flattened_error_async(): - client = ModelServiceAsyncClient( - credentials=credentials.AnonymousCredentials(), - ) + client = ModelServiceAsyncClient(credentials=credentials.AnonymousCredentials(),) # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): await client.list_models( - model_service.ListModelsRequest(), - parent="parent_value", + model_service.ListModelsRequest(), parent="parent_value", ) def test_list_models_pager(): - client = ModelServiceClient( - credentials=credentials.AnonymousCredentials, - ) + client = ModelServiceClient(credentials=credentials.AnonymousCredentials,) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.list_models), "__call__") as call: # Set the response to a series of pages. call.side_effect = ( model_service.ListModelsResponse( - models=[ - model.Model(), - model.Model(), - model.Model(), - ], + models=[model.Model(), model.Model(), model.Model(),], next_page_token="abc", ), + model_service.ListModelsResponse(models=[], next_page_token="def",), model_service.ListModelsResponse( - models=[], - next_page_token="def", - ), - model_service.ListModelsResponse( - models=[ - model.Model(), - ], - next_page_token="ghi", - ), - model_service.ListModelsResponse( - models=[ - model.Model(), - model.Model(), - ], + models=[model.Model(),], next_page_token="ghi", ), + model_service.ListModelsResponse(models=[model.Model(), model.Model(),],), RuntimeError, ) @@ -1227,38 +1132,21 @@ def test_list_models_pager(): def test_list_models_pages(): - client = ModelServiceClient( - credentials=credentials.AnonymousCredentials, - ) + client = ModelServiceClient(credentials=credentials.AnonymousCredentials,) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.list_models), "__call__") as call: # Set the response to a series of pages. call.side_effect = ( model_service.ListModelsResponse( - models=[ - model.Model(), - model.Model(), - model.Model(), - ], + models=[model.Model(), model.Model(), model.Model(),], next_page_token="abc", ), + model_service.ListModelsResponse(models=[], next_page_token="def",), model_service.ListModelsResponse( - models=[], - next_page_token="def", - ), - model_service.ListModelsResponse( - models=[ - model.Model(), - ], - next_page_token="ghi", - ), - model_service.ListModelsResponse( - models=[ - model.Model(), - model.Model(), - ], + models=[model.Model(),], next_page_token="ghi", ), + model_service.ListModelsResponse(models=[model.Model(), model.Model(),],), RuntimeError, ) pages = list(client.list_models(request={}).pages) @@ -1268,9 +1156,7 @@ def test_list_models_pages(): @pytest.mark.asyncio async def test_list_models_async_pager(): - client = ModelServiceAsyncClient( - credentials=credentials.AnonymousCredentials, - ) + client = ModelServiceAsyncClient(credentials=credentials.AnonymousCredentials,) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -1279,34 +1165,17 @@ async def test_list_models_async_pager(): # Set the response to a series of pages. 
call.side_effect = ( model_service.ListModelsResponse( - models=[ - model.Model(), - model.Model(), - model.Model(), - ], + models=[model.Model(), model.Model(), model.Model(),], next_page_token="abc", ), + model_service.ListModelsResponse(models=[], next_page_token="def",), model_service.ListModelsResponse( - models=[], - next_page_token="def", - ), - model_service.ListModelsResponse( - models=[ - model.Model(), - ], - next_page_token="ghi", - ), - model_service.ListModelsResponse( - models=[ - model.Model(), - model.Model(), - ], + models=[model.Model(),], next_page_token="ghi", ), + model_service.ListModelsResponse(models=[model.Model(), model.Model(),],), RuntimeError, ) - async_pager = await client.list_models( - request={}, - ) + async_pager = await client.list_models(request={},) assert async_pager.next_page_token == "abc" responses = [] async for response in async_pager: @@ -1318,9 +1187,7 @@ async def test_list_models_async_pager(): @pytest.mark.asyncio async def test_list_models_async_pages(): - client = ModelServiceAsyncClient( - credentials=credentials.AnonymousCredentials, - ) + client = ModelServiceAsyncClient(credentials=credentials.AnonymousCredentials,) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -1329,29 +1196,14 @@ async def test_list_models_async_pages(): # Set the response to a series of pages. call.side_effect = ( model_service.ListModelsResponse( - models=[ - model.Model(), - model.Model(), - model.Model(), - ], + models=[model.Model(), model.Model(), model.Model(),], next_page_token="abc", ), + model_service.ListModelsResponse(models=[], next_page_token="def",), model_service.ListModelsResponse( - models=[], - next_page_token="def", - ), - model_service.ListModelsResponse( - models=[ - model.Model(), - ], - next_page_token="ghi", - ), - model_service.ListModelsResponse( - models=[ - model.Model(), - model.Model(), - ], + models=[model.Model(),], next_page_token="ghi", ), + model_service.ListModelsResponse(models=[model.Model(), model.Model(),],), RuntimeError, ) pages = [] @@ -1365,8 +1217,7 @@ def test_update_model( transport: str = "grpc", request_type=model_service.UpdateModelRequest ): client = ModelServiceClient( - credentials=credentials.AnonymousCredentials(), - transport=transport, + credentials=credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -1439,8 +1290,7 @@ async def test_update_model_async( transport: str = "grpc_asyncio", request_type=model_service.UpdateModelRequest ): client = ModelServiceAsyncClient( - credentials=credentials.AnonymousCredentials(), - transport=transport, + credentials=credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -1515,9 +1365,7 @@ async def test_update_model_async_from_dict(): def test_update_model_field_headers(): - client = ModelServiceClient( - credentials=credentials.AnonymousCredentials(), - ) + client = ModelServiceClient(credentials=credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. @@ -1537,17 +1385,12 @@ def test_update_model_field_headers(): # Establish that the field header was sent. 
_, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "model.name=model.name/value", - ) in kw["metadata"] + assert ("x-goog-request-params", "model.name=model.name/value",) in kw["metadata"] @pytest.mark.asyncio async def test_update_model_field_headers_async(): - client = ModelServiceAsyncClient( - credentials=credentials.AnonymousCredentials(), - ) + client = ModelServiceAsyncClient(credentials=credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. @@ -1567,16 +1410,11 @@ async def test_update_model_field_headers_async(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "model.name=model.name/value", - ) in kw["metadata"] + assert ("x-goog-request-params", "model.name=model.name/value",) in kw["metadata"] def test_update_model_flattened(): - client = ModelServiceClient( - credentials=credentials.AnonymousCredentials(), - ) + client = ModelServiceClient(credentials=credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.update_model), "__call__") as call: @@ -1601,9 +1439,7 @@ def test_update_model_flattened(): def test_update_model_flattened_error(): - client = ModelServiceClient( - credentials=credentials.AnonymousCredentials(), - ) + client = ModelServiceClient(credentials=credentials.AnonymousCredentials(),) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -1617,9 +1453,7 @@ def test_update_model_flattened_error(): @pytest.mark.asyncio async def test_update_model_flattened_async(): - client = ModelServiceAsyncClient( - credentials=credentials.AnonymousCredentials(), - ) + client = ModelServiceAsyncClient(credentials=credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.update_model), "__call__") as call: @@ -1646,9 +1480,7 @@ async def test_update_model_flattened_async(): @pytest.mark.asyncio async def test_update_model_flattened_error_async(): - client = ModelServiceAsyncClient( - credentials=credentials.AnonymousCredentials(), - ) + client = ModelServiceAsyncClient(credentials=credentials.AnonymousCredentials(),) # Attempting to call a method with both a request object and flattened # fields is an error. 
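The *_field_headers tests in the surrounding hunks capture the metadata passed to the mocked stub and assert that the tuple ("x-goog-request-params", "model.name=model.name/value") is present; this header is how URI-bound request fields reach the backend for routing without the request body being parsed. A sketch of building the same tuple with google.api_core's routing-header helper, assuming its behaviour of leaving "/" unescaped so resource paths stay readable:

    from google.api_core.gapic_v1 import routing_header

    # URI-bound request fields are copied into x-goog-request-params
    # so the frontend can route the call; the tests assert that this
    # exact tuple appears in kw["metadata"].
    params = [("model.name", "model.name/value")]
    metadata = routing_header.to_grpc_metadata(params)
    print(metadata)
    # expected: ('x-goog-request-params', 'model.name=model.name/value')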
@@ -1664,8 +1496,7 @@ def test_delete_model( transport: str = "grpc", request_type=model_service.DeleteModelRequest ): client = ModelServiceClient( - credentials=credentials.AnonymousCredentials(), - transport=transport, + credentials=credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -1698,8 +1529,7 @@ async def test_delete_model_async( transport: str = "grpc_asyncio", request_type=model_service.DeleteModelRequest ): client = ModelServiceAsyncClient( - credentials=credentials.AnonymousCredentials(), - transport=transport, + credentials=credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -1731,9 +1561,7 @@ async def test_delete_model_async_from_dict(): def test_delete_model_field_headers(): - client = ModelServiceClient( - credentials=credentials.AnonymousCredentials(), - ) + client = ModelServiceClient(credentials=credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. @@ -1753,17 +1581,12 @@ def test_delete_model_field_headers(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "name=name/value", - ) in kw["metadata"] + assert ("x-goog-request-params", "name=name/value",) in kw["metadata"] @pytest.mark.asyncio async def test_delete_model_field_headers_async(): - client = ModelServiceAsyncClient( - credentials=credentials.AnonymousCredentials(), - ) + client = ModelServiceAsyncClient(credentials=credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. @@ -1785,16 +1608,11 @@ async def test_delete_model_field_headers_async(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "name=name/value", - ) in kw["metadata"] + assert ("x-goog-request-params", "name=name/value",) in kw["metadata"] def test_delete_model_flattened(): - client = ModelServiceClient( - credentials=credentials.AnonymousCredentials(), - ) + client = ModelServiceClient(credentials=credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.delete_model), "__call__") as call: @@ -1803,9 +1621,7 @@ def test_delete_model_flattened(): # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - client.delete_model( - name="name_value", - ) + client.delete_model(name="name_value",) # Establish that the underlying call was made with the expected # request object values. @@ -1816,24 +1632,19 @@ def test_delete_model_flattened(): def test_delete_model_flattened_error(): - client = ModelServiceClient( - credentials=credentials.AnonymousCredentials(), - ) + client = ModelServiceClient(credentials=credentials.AnonymousCredentials(),) # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): client.delete_model( - model_service.DeleteModelRequest(), - name="name_value", + model_service.DeleteModelRequest(), name="name_value", ) @pytest.mark.asyncio async def test_delete_model_flattened_async(): - client = ModelServiceAsyncClient( - credentials=credentials.AnonymousCredentials(), - ) + client = ModelServiceAsyncClient(credentials=credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.delete_model), "__call__") as call: @@ -1845,9 +1656,7 @@ async def test_delete_model_flattened_async(): ) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - response = await client.delete_model( - name="name_value", - ) + response = await client.delete_model(name="name_value",) # Establish that the underlying call was made with the expected # request object values. @@ -1859,16 +1668,13 @@ async def test_delete_model_flattened_async(): @pytest.mark.asyncio async def test_delete_model_flattened_error_async(): - client = ModelServiceAsyncClient( - credentials=credentials.AnonymousCredentials(), - ) + client = ModelServiceAsyncClient(credentials=credentials.AnonymousCredentials(),) # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): await client.delete_model( - model_service.DeleteModelRequest(), - name="name_value", + model_service.DeleteModelRequest(), name="name_value", ) @@ -1876,8 +1682,7 @@ def test_export_model( transport: str = "grpc", request_type=model_service.ExportModelRequest ): client = ModelServiceClient( - credentials=credentials.AnonymousCredentials(), - transport=transport, + credentials=credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -1910,8 +1715,7 @@ async def test_export_model_async( transport: str = "grpc_asyncio", request_type=model_service.ExportModelRequest ): client = ModelServiceAsyncClient( - credentials=credentials.AnonymousCredentials(), - transport=transport, + credentials=credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -1943,9 +1747,7 @@ async def test_export_model_async_from_dict(): def test_export_model_field_headers(): - client = ModelServiceClient( - credentials=credentials.AnonymousCredentials(), - ) + client = ModelServiceClient(credentials=credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. @@ -1965,17 +1767,12 @@ def test_export_model_field_headers(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "name=name/value", - ) in kw["metadata"] + assert ("x-goog-request-params", "name=name/value",) in kw["metadata"] @pytest.mark.asyncio async def test_export_model_field_headers_async(): - client = ModelServiceAsyncClient( - credentials=credentials.AnonymousCredentials(), - ) + client = ModelServiceAsyncClient(credentials=credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. @@ -1997,16 +1794,11 @@ async def test_export_model_field_headers_async(): # Establish that the field header was sent. 
_, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "name=name/value", - ) in kw["metadata"] + assert ("x-goog-request-params", "name=name/value",) in kw["metadata"] def test_export_model_flattened(): - client = ModelServiceClient( - credentials=credentials.AnonymousCredentials(), - ) + client = ModelServiceClient(credentials=credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.export_model), "__call__") as call: @@ -2035,9 +1827,7 @@ def test_export_model_flattened(): def test_export_model_flattened_error(): - client = ModelServiceClient( - credentials=credentials.AnonymousCredentials(), - ) + client = ModelServiceClient(credentials=credentials.AnonymousCredentials(),) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -2053,9 +1843,7 @@ def test_export_model_flattened_error(): @pytest.mark.asyncio async def test_export_model_flattened_async(): - client = ModelServiceAsyncClient( - credentials=credentials.AnonymousCredentials(), - ) + client = ModelServiceAsyncClient(credentials=credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.export_model), "__call__") as call: @@ -2088,9 +1876,7 @@ async def test_export_model_flattened_async(): @pytest.mark.asyncio async def test_export_model_flattened_error_async(): - client = ModelServiceAsyncClient( - credentials=credentials.AnonymousCredentials(), - ) + client = ModelServiceAsyncClient(credentials=credentials.AnonymousCredentials(),) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -2108,8 +1894,7 @@ def test_get_model_evaluation( transport: str = "grpc", request_type=model_service.GetModelEvaluationRequest ): client = ModelServiceClient( - credentials=credentials.AnonymousCredentials(), - transport=transport, + credentials=credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -2156,8 +1941,7 @@ async def test_get_model_evaluation_async( request_type=model_service.GetModelEvaluationRequest, ): client = ModelServiceAsyncClient( - credentials=credentials.AnonymousCredentials(), - transport=transport, + credentials=credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -2201,9 +1985,7 @@ async def test_get_model_evaluation_async_from_dict(): def test_get_model_evaluation_field_headers(): - client = ModelServiceClient( - credentials=credentials.AnonymousCredentials(), - ) + client = ModelServiceClient(credentials=credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. @@ -2225,17 +2007,12 @@ def test_get_model_evaluation_field_headers(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "name=name/value", - ) in kw["metadata"] + assert ("x-goog-request-params", "name=name/value",) in kw["metadata"] @pytest.mark.asyncio async def test_get_model_evaluation_field_headers_async(): - client = ModelServiceAsyncClient( - credentials=credentials.AnonymousCredentials(), - ) + client = ModelServiceAsyncClient(credentials=credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. 
Set these to a non-empty value. @@ -2259,16 +2036,11 @@ async def test_get_model_evaluation_field_headers_async(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "name=name/value", - ) in kw["metadata"] + assert ("x-goog-request-params", "name=name/value",) in kw["metadata"] def test_get_model_evaluation_flattened(): - client = ModelServiceClient( - credentials=credentials.AnonymousCredentials(), - ) + client = ModelServiceClient(credentials=credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -2279,9 +2051,7 @@ def test_get_model_evaluation_flattened(): # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - client.get_model_evaluation( - name="name_value", - ) + client.get_model_evaluation(name="name_value",) # Establish that the underlying call was made with the expected # request object values. @@ -2292,24 +2062,19 @@ def test_get_model_evaluation_flattened(): def test_get_model_evaluation_flattened_error(): - client = ModelServiceClient( - credentials=credentials.AnonymousCredentials(), - ) + client = ModelServiceClient(credentials=credentials.AnonymousCredentials(),) # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): client.get_model_evaluation( - model_service.GetModelEvaluationRequest(), - name="name_value", + model_service.GetModelEvaluationRequest(), name="name_value", ) @pytest.mark.asyncio async def test_get_model_evaluation_flattened_async(): - client = ModelServiceAsyncClient( - credentials=credentials.AnonymousCredentials(), - ) + client = ModelServiceAsyncClient(credentials=credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -2323,9 +2088,7 @@ async def test_get_model_evaluation_flattened_async(): ) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - response = await client.get_model_evaluation( - name="name_value", - ) + response = await client.get_model_evaluation(name="name_value",) # Establish that the underlying call was made with the expected # request object values. @@ -2337,16 +2100,13 @@ async def test_get_model_evaluation_flattened_async(): @pytest.mark.asyncio async def test_get_model_evaluation_flattened_error_async(): - client = ModelServiceAsyncClient( - credentials=credentials.AnonymousCredentials(), - ) + client = ModelServiceAsyncClient(credentials=credentials.AnonymousCredentials(),) # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): await client.get_model_evaluation( - model_service.GetModelEvaluationRequest(), - name="name_value", + model_service.GetModelEvaluationRequest(), name="name_value", ) @@ -2354,8 +2114,7 @@ def test_list_model_evaluations( transport: str = "grpc", request_type=model_service.ListModelEvaluationsRequest ): client = ModelServiceClient( - credentials=credentials.AnonymousCredentials(), - transport=transport, + credentials=credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -2396,8 +2155,7 @@ async def test_list_model_evaluations_async( request_type=model_service.ListModelEvaluationsRequest, ): client = ModelServiceAsyncClient( - credentials=credentials.AnonymousCredentials(), - transport=transport, + credentials=credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -2435,9 +2193,7 @@ async def test_list_model_evaluations_async_from_dict(): def test_list_model_evaluations_field_headers(): - client = ModelServiceClient( - credentials=credentials.AnonymousCredentials(), - ) + client = ModelServiceClient(credentials=credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. @@ -2459,17 +2215,12 @@ def test_list_model_evaluations_field_headers(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "parent=parent/value", - ) in kw["metadata"] + assert ("x-goog-request-params", "parent=parent/value",) in kw["metadata"] @pytest.mark.asyncio async def test_list_model_evaluations_field_headers_async(): - client = ModelServiceAsyncClient( - credentials=credentials.AnonymousCredentials(), - ) + client = ModelServiceAsyncClient(credentials=credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. @@ -2493,16 +2244,11 @@ async def test_list_model_evaluations_field_headers_async(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "parent=parent/value", - ) in kw["metadata"] + assert ("x-goog-request-params", "parent=parent/value",) in kw["metadata"] def test_list_model_evaluations_flattened(): - client = ModelServiceClient( - credentials=credentials.AnonymousCredentials(), - ) + client = ModelServiceClient(credentials=credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -2513,9 +2259,7 @@ def test_list_model_evaluations_flattened(): # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - client.list_model_evaluations( - parent="parent_value", - ) + client.list_model_evaluations(parent="parent_value",) # Establish that the underlying call was made with the expected # request object values. @@ -2526,24 +2270,19 @@ def test_list_model_evaluations_flattened(): def test_list_model_evaluations_flattened_error(): - client = ModelServiceClient( - credentials=credentials.AnonymousCredentials(), - ) + client = ModelServiceClient(credentials=credentials.AnonymousCredentials(),) # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): client.list_model_evaluations( - model_service.ListModelEvaluationsRequest(), - parent="parent_value", + model_service.ListModelEvaluationsRequest(), parent="parent_value", ) @pytest.mark.asyncio async def test_list_model_evaluations_flattened_async(): - client = ModelServiceAsyncClient( - credentials=credentials.AnonymousCredentials(), - ) + client = ModelServiceAsyncClient(credentials=credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -2557,9 +2296,7 @@ async def test_list_model_evaluations_flattened_async(): ) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - response = await client.list_model_evaluations( - parent="parent_value", - ) + response = await client.list_model_evaluations(parent="parent_value",) # Establish that the underlying call was made with the expected # request object values. @@ -2571,23 +2308,18 @@ async def test_list_model_evaluations_flattened_async(): @pytest.mark.asyncio async def test_list_model_evaluations_flattened_error_async(): - client = ModelServiceAsyncClient( - credentials=credentials.AnonymousCredentials(), - ) + client = ModelServiceAsyncClient(credentials=credentials.AnonymousCredentials(),) # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): await client.list_model_evaluations( - model_service.ListModelEvaluationsRequest(), - parent="parent_value", + model_service.ListModelEvaluationsRequest(), parent="parent_value", ) def test_list_model_evaluations_pager(): - client = ModelServiceClient( - credentials=credentials.AnonymousCredentials, - ) + client = ModelServiceClient(credentials=credentials.AnonymousCredentials,) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -2604,13 +2336,10 @@ def test_list_model_evaluations_pager(): next_page_token="abc", ), model_service.ListModelEvaluationsResponse( - model_evaluations=[], - next_page_token="def", + model_evaluations=[], next_page_token="def", ), model_service.ListModelEvaluationsResponse( - model_evaluations=[ - model_evaluation.ModelEvaluation(), - ], + model_evaluations=[model_evaluation.ModelEvaluation(),], next_page_token="ghi", ), model_service.ListModelEvaluationsResponse( @@ -2636,9 +2365,7 @@ def test_list_model_evaluations_pager(): def test_list_model_evaluations_pages(): - client = ModelServiceClient( - credentials=credentials.AnonymousCredentials, - ) + client = ModelServiceClient(credentials=credentials.AnonymousCredentials,) # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object( @@ -2655,13 +2382,10 @@ def test_list_model_evaluations_pages(): next_page_token="abc", ), model_service.ListModelEvaluationsResponse( - model_evaluations=[], - next_page_token="def", + model_evaluations=[], next_page_token="def", ), model_service.ListModelEvaluationsResponse( - model_evaluations=[ - model_evaluation.ModelEvaluation(), - ], + model_evaluations=[model_evaluation.ModelEvaluation(),], next_page_token="ghi", ), model_service.ListModelEvaluationsResponse( @@ -2679,9 +2403,7 @@ def test_list_model_evaluations_pages(): @pytest.mark.asyncio async def test_list_model_evaluations_async_pager(): - client = ModelServiceAsyncClient( - credentials=credentials.AnonymousCredentials, - ) + client = ModelServiceAsyncClient(credentials=credentials.AnonymousCredentials,) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -2700,13 +2422,10 @@ async def test_list_model_evaluations_async_pager(): next_page_token="abc", ), model_service.ListModelEvaluationsResponse( - model_evaluations=[], - next_page_token="def", + model_evaluations=[], next_page_token="def", ), model_service.ListModelEvaluationsResponse( - model_evaluations=[ - model_evaluation.ModelEvaluation(), - ], + model_evaluations=[model_evaluation.ModelEvaluation(),], next_page_token="ghi", ), model_service.ListModelEvaluationsResponse( @@ -2717,9 +2436,7 @@ async def test_list_model_evaluations_async_pager(): ), RuntimeError, ) - async_pager = await client.list_model_evaluations( - request={}, - ) + async_pager = await client.list_model_evaluations(request={},) assert async_pager.next_page_token == "abc" responses = [] async for response in async_pager: @@ -2731,9 +2448,7 @@ async def test_list_model_evaluations_async_pager(): @pytest.mark.asyncio async def test_list_model_evaluations_async_pages(): - client = ModelServiceAsyncClient( - credentials=credentials.AnonymousCredentials, - ) + client = ModelServiceAsyncClient(credentials=credentials.AnonymousCredentials,) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -2752,13 +2467,10 @@ async def test_list_model_evaluations_async_pages(): next_page_token="abc", ), model_service.ListModelEvaluationsResponse( - model_evaluations=[], - next_page_token="def", + model_evaluations=[], next_page_token="def", ), model_service.ListModelEvaluationsResponse( - model_evaluations=[ - model_evaluation.ModelEvaluation(), - ], + model_evaluations=[model_evaluation.ModelEvaluation(),], next_page_token="ghi", ), model_service.ListModelEvaluationsResponse( @@ -2780,8 +2492,7 @@ def test_get_model_evaluation_slice( transport: str = "grpc", request_type=model_service.GetModelEvaluationSliceRequest ): client = ModelServiceClient( - credentials=credentials.AnonymousCredentials(), - transport=transport, + credentials=credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -2794,8 +2505,7 @@ def test_get_model_evaluation_slice( ) as call: # Designate an appropriate return value for the call. 
call.return_value = model_evaluation_slice.ModelEvaluationSlice( - name="name_value", - metrics_schema_uri="metrics_schema_uri_value", + name="name_value", metrics_schema_uri="metrics_schema_uri_value", ) response = client.get_model_evaluation_slice(request) @@ -2825,8 +2535,7 @@ async def test_get_model_evaluation_slice_async( request_type=model_service.GetModelEvaluationSliceRequest, ): client = ModelServiceAsyncClient( - credentials=credentials.AnonymousCredentials(), - transport=transport, + credentials=credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -2840,8 +2549,7 @@ async def test_get_model_evaluation_slice_async( # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( model_evaluation_slice.ModelEvaluationSlice( - name="name_value", - metrics_schema_uri="metrics_schema_uri_value", + name="name_value", metrics_schema_uri="metrics_schema_uri_value", ) ) @@ -2867,9 +2575,7 @@ async def test_get_model_evaluation_slice_async_from_dict(): def test_get_model_evaluation_slice_field_headers(): - client = ModelServiceClient( - credentials=credentials.AnonymousCredentials(), - ) + client = ModelServiceClient(credentials=credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. @@ -2891,17 +2597,12 @@ def test_get_model_evaluation_slice_field_headers(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "name=name/value", - ) in kw["metadata"] + assert ("x-goog-request-params", "name=name/value",) in kw["metadata"] @pytest.mark.asyncio async def test_get_model_evaluation_slice_field_headers_async(): - client = ModelServiceAsyncClient( - credentials=credentials.AnonymousCredentials(), - ) + client = ModelServiceAsyncClient(credentials=credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. @@ -2925,16 +2626,11 @@ async def test_get_model_evaluation_slice_field_headers_async(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "name=name/value", - ) in kw["metadata"] + assert ("x-goog-request-params", "name=name/value",) in kw["metadata"] def test_get_model_evaluation_slice_flattened(): - client = ModelServiceClient( - credentials=credentials.AnonymousCredentials(), - ) + client = ModelServiceClient(credentials=credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -2945,9 +2641,7 @@ def test_get_model_evaluation_slice_flattened(): # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - client.get_model_evaluation_slice( - name="name_value", - ) + client.get_model_evaluation_slice(name="name_value",) # Establish that the underlying call was made with the expected # request object values. @@ -2958,24 +2652,19 @@ def test_get_model_evaluation_slice_flattened(): def test_get_model_evaluation_slice_flattened_error(): - client = ModelServiceClient( - credentials=credentials.AnonymousCredentials(), - ) + client = ModelServiceClient(credentials=credentials.AnonymousCredentials(),) # Attempting to call a method with both a request object and flattened # fields is an error. 
     with pytest.raises(ValueError):
         client.get_model_evaluation_slice(
-            model_service.GetModelEvaluationSliceRequest(),
-            name="name_value",
+            model_service.GetModelEvaluationSliceRequest(), name="name_value",
         )
 
 
 @pytest.mark.asyncio
 async def test_get_model_evaluation_slice_flattened_async():
-    client = ModelServiceAsyncClient(
-        credentials=credentials.AnonymousCredentials(),
-    )
+    client = ModelServiceAsyncClient(credentials=credentials.AnonymousCredentials(),)
 
     # Mock the actual call within the gRPC stub, and fake the request.
     with mock.patch.object(
@@ -2989,9 +2678,7 @@ async def test_get_model_evaluation_slice_flattened_async():
         )
         # Call the method with a truthy value for each flattened field,
         # using the keyword arguments to the method.
-        response = await client.get_model_evaluation_slice(
-            name="name_value",
-        )
+        response = await client.get_model_evaluation_slice(name="name_value",)
 
         # Establish that the underlying call was made with the expected
         # request object values.
@@ -3003,16 +2690,13 @@ async def test_get_model_evaluation_slice_flattened_async():
 
 @pytest.mark.asyncio
 async def test_get_model_evaluation_slice_flattened_error_async():
-    client = ModelServiceAsyncClient(
-        credentials=credentials.AnonymousCredentials(),
-    )
+    client = ModelServiceAsyncClient(credentials=credentials.AnonymousCredentials(),)
 
     # Attempting to call a method with both a request object and flattened
     # fields is an error.
     with pytest.raises(ValueError):
         await client.get_model_evaluation_slice(
-            model_service.GetModelEvaluationSliceRequest(),
-            name="name_value",
+            model_service.GetModelEvaluationSliceRequest(), name="name_value",
         )
 
 
@@ -3020,8 +2704,7 @@ def test_list_model_evaluation_slices(
     transport: str = "grpc", request_type=model_service.ListModelEvaluationSlicesRequest
 ):
     client = ModelServiceClient(
-        credentials=credentials.AnonymousCredentials(),
-        transport=transport,
+        credentials=credentials.AnonymousCredentials(), transport=transport,
     )
 
     # Everything is optional in proto3 as far as the runtime is concerned,
@@ -3062,8 +2745,7 @@ async def test_list_model_evaluation_slices_async(
     request_type=model_service.ListModelEvaluationSlicesRequest,
 ):
     client = ModelServiceAsyncClient(
-        credentials=credentials.AnonymousCredentials(),
-        transport=transport,
+        credentials=credentials.AnonymousCredentials(), transport=transport,
     )
 
     # Everything is optional in proto3 as far as the runtime is concerned,
@@ -3101,9 +2783,7 @@ async def test_list_model_evaluation_slices_async_from_dict():
 
 
 def test_list_model_evaluation_slices_field_headers():
-    client = ModelServiceClient(
-        credentials=credentials.AnonymousCredentials(),
-    )
+    client = ModelServiceClient(credentials=credentials.AnonymousCredentials(),)
 
     # Any value that is part of the HTTP/1.1 URI should be sent as
     # a field header. Set these to a non-empty value.
@@ -3125,17 +2805,12 @@ def test_list_model_evaluation_slices_field_headers():
 
     # Establish that the field header was sent.
     _, _, kw = call.mock_calls[0]
-    assert (
-        "x-goog-request-params",
-        "parent=parent/value",
-    ) in kw["metadata"]
+    assert ("x-goog-request-params", "parent=parent/value",) in kw["metadata"]
 
 
 @pytest.mark.asyncio
 async def test_list_model_evaluation_slices_field_headers_async():
-    client = ModelServiceAsyncClient(
-        credentials=credentials.AnonymousCredentials(),
-    )
+    client = ModelServiceAsyncClient(credentials=credentials.AnonymousCredentials(),)
 
     # Any value that is part of the HTTP/1.1 URI should be sent as
     # a field header. Set these to a non-empty value.
@@ -3159,16 +2834,11 @@ async def test_list_model_evaluation_slices_field_headers_async():
 
     # Establish that the field header was sent.
     _, _, kw = call.mock_calls[0]
-    assert (
-        "x-goog-request-params",
-        "parent=parent/value",
-    ) in kw["metadata"]
+    assert ("x-goog-request-params", "parent=parent/value",) in kw["metadata"]
 
 
 def test_list_model_evaluation_slices_flattened():
-    client = ModelServiceClient(
-        credentials=credentials.AnonymousCredentials(),
-    )
+    client = ModelServiceClient(credentials=credentials.AnonymousCredentials(),)
 
     # Mock the actual call within the gRPC stub, and fake the request.
     with mock.patch.object(
@@ -3179,9 +2849,7 @@ def test_list_model_evaluation_slices_flattened():
 
         # Call the method with a truthy value for each flattened field,
         # using the keyword arguments to the method.
-        client.list_model_evaluation_slices(
-            parent="parent_value",
-        )
+        client.list_model_evaluation_slices(parent="parent_value",)
 
         # Establish that the underlying call was made with the expected
         # request object values.
@@ -3192,24 +2860,19 @@ def test_list_model_evaluation_slices_flattened():
 
 
 def test_list_model_evaluation_slices_flattened_error():
-    client = ModelServiceClient(
-        credentials=credentials.AnonymousCredentials(),
-    )
+    client = ModelServiceClient(credentials=credentials.AnonymousCredentials(),)
 
     # Attempting to call a method with both a request object and flattened
     # fields is an error.
     with pytest.raises(ValueError):
         client.list_model_evaluation_slices(
-            model_service.ListModelEvaluationSlicesRequest(),
-            parent="parent_value",
+            model_service.ListModelEvaluationSlicesRequest(), parent="parent_value",
        )
 
 
 @pytest.mark.asyncio
 async def test_list_model_evaluation_slices_flattened_async():
-    client = ModelServiceAsyncClient(
-        credentials=credentials.AnonymousCredentials(),
-    )
+    client = ModelServiceAsyncClient(credentials=credentials.AnonymousCredentials(),)
 
     # Mock the actual call within the gRPC stub, and fake the request.
     with mock.patch.object(
@@ -3223,9 +2886,7 @@ async def test_list_model_evaluation_slices_flattened_async():
         )
         # Call the method with a truthy value for each flattened field,
         # using the keyword arguments to the method.
-        response = await client.list_model_evaluation_slices(
-            parent="parent_value",
-        )
+        response = await client.list_model_evaluation_slices(parent="parent_value",)
 
         # Establish that the underlying call was made with the expected
         # request object values.
@@ -3237,23 +2898,18 @@ async def test_list_model_evaluation_slices_flattened_async():
 
 @pytest.mark.asyncio
 async def test_list_model_evaluation_slices_flattened_error_async():
-    client = ModelServiceAsyncClient(
-        credentials=credentials.AnonymousCredentials(),
-    )
+    client = ModelServiceAsyncClient(credentials=credentials.AnonymousCredentials(),)
 
     # Attempting to call a method with both a request object and flattened
     # fields is an error.
     with pytest.raises(ValueError):
         await client.list_model_evaluation_slices(
-            model_service.ListModelEvaluationSlicesRequest(),
-            parent="parent_value",
+            model_service.ListModelEvaluationSlicesRequest(), parent="parent_value",
         )
 
 
 def test_list_model_evaluation_slices_pager():
-    client = ModelServiceClient(
-        credentials=credentials.AnonymousCredentials,
-    )
+    client = ModelServiceClient(credentials=credentials.AnonymousCredentials,)
 
     # Mock the actual call within the gRPC stub, and fake the request.
     with mock.patch.object(
@@ -3270,8 +2926,7 @@ def test_list_model_evaluation_slices_pager():
                 next_page_token="abc",
             ),
             model_service.ListModelEvaluationSlicesResponse(
-                model_evaluation_slices=[],
-                next_page_token="def",
+                model_evaluation_slices=[], next_page_token="def",
             ),
             model_service.ListModelEvaluationSlicesResponse(
                 model_evaluation_slices=[
@@ -3304,9 +2959,7 @@ def test_list_model_evaluation_slices_pager():
 
 
 def test_list_model_evaluation_slices_pages():
-    client = ModelServiceClient(
-        credentials=credentials.AnonymousCredentials,
-    )
+    client = ModelServiceClient(credentials=credentials.AnonymousCredentials,)
 
     # Mock the actual call within the gRPC stub, and fake the request.
     with mock.patch.object(
@@ -3323,8 +2976,7 @@ def test_list_model_evaluation_slices_pages():
                 next_page_token="abc",
             ),
             model_service.ListModelEvaluationSlicesResponse(
-                model_evaluation_slices=[],
-                next_page_token="def",
+                model_evaluation_slices=[], next_page_token="def",
             ),
             model_service.ListModelEvaluationSlicesResponse(
                 model_evaluation_slices=[
@@ -3347,9 +2999,7 @@ def test_list_model_evaluation_slices_pages():
 
 @pytest.mark.asyncio
 async def test_list_model_evaluation_slices_async_pager():
-    client = ModelServiceAsyncClient(
-        credentials=credentials.AnonymousCredentials,
-    )
+    client = ModelServiceAsyncClient(credentials=credentials.AnonymousCredentials,)
 
     # Mock the actual call within the gRPC stub, and fake the request.
     with mock.patch.object(
@@ -3368,8 +3018,7 @@ async def test_list_model_evaluation_slices_async_pager():
                 next_page_token="abc",
             ),
             model_service.ListModelEvaluationSlicesResponse(
-                model_evaluation_slices=[],
-                next_page_token="def",
+                model_evaluation_slices=[], next_page_token="def",
             ),
             model_service.ListModelEvaluationSlicesResponse(
                 model_evaluation_slices=[
@@ -3385,9 +3034,7 @@ async def test_list_model_evaluation_slices_async_pager():
             ),
             RuntimeError,
         )
-        async_pager = await client.list_model_evaluation_slices(
-            request={},
-        )
+        async_pager = await client.list_model_evaluation_slices(request={},)
         assert async_pager.next_page_token == "abc"
         responses = []
         async for response in async_pager:
@@ -3402,9 +3049,7 @@ async def test_list_model_evaluation_slices_async_pager():
 
 @pytest.mark.asyncio
 async def test_list_model_evaluation_slices_async_pages():
-    client = ModelServiceAsyncClient(
-        credentials=credentials.AnonymousCredentials,
-    )
+    client = ModelServiceAsyncClient(credentials=credentials.AnonymousCredentials,)
 
     # Mock the actual call within the gRPC stub, and fake the request.
     with mock.patch.object(
@@ -3423,8 +3068,7 @@ async def test_list_model_evaluation_slices_async_pages():
                 next_page_token="abc",
             ),
             model_service.ListModelEvaluationSlicesResponse(
-                model_evaluation_slices=[],
-                next_page_token="def",
+                model_evaluation_slices=[], next_page_token="def",
             ),
             model_service.ListModelEvaluationSlicesResponse(
                 model_evaluation_slices=[
@@ -3456,8 +3100,7 @@ def test_credentials_transport_error():
     )
     with pytest.raises(ValueError):
         client = ModelServiceClient(
-            credentials=credentials.AnonymousCredentials(),
-            transport=transport,
+            credentials=credentials.AnonymousCredentials(), transport=transport,
         )
 
     # It is an error to provide a credentials file and a transport instance.
@@ -3476,8 +3119,7 @@ def test_credentials_transport_error():
     )
     with pytest.raises(ValueError):
         client = ModelServiceClient(
-            client_options={"scopes": ["1", "2"]},
-            transport=transport,
+            client_options={"scopes": ["1", "2"]}, transport=transport,
         )
 
 
@@ -3519,13 +3161,8 @@ def test_transport_adc(transport_class):
 
 def test_transport_grpc_default():
     # A client should use the gRPC transport by default.
-    client = ModelServiceClient(
-        credentials=credentials.AnonymousCredentials(),
-    )
-    assert isinstance(
-        client.transport,
-        transports.ModelServiceGrpcTransport,
-    )
+    client = ModelServiceClient(credentials=credentials.AnonymousCredentials(),)
+    assert isinstance(client.transport, transports.ModelServiceGrpcTransport,)
 
 
 def test_model_service_base_transport_error():
@@ -3581,8 +3218,7 @@ def test_model_service_base_transport_with_credentials_file():
         Transport.return_value = None
         load_creds.return_value = (credentials.AnonymousCredentials(), None)
         transport = transports.ModelServiceTransport(
-            credentials_file="credentials.json",
-            quota_project_id="octopus",
+            credentials_file="credentials.json", quota_project_id="octopus",
         )
         load_creds.assert_called_once_with(
             "credentials.json",
@@ -3652,8 +3288,7 @@ def test_model_service_grpc_transport_channel():
 
     # Check that channel is used if provided.
     transport = transports.ModelServiceGrpcTransport(
-        host="squid.clam.whelk",
-        channel=channel,
+        host="squid.clam.whelk", channel=channel,
     )
     assert transport.grpc_channel == channel
     assert transport._host == "squid.clam.whelk:443"
@@ -3665,8 +3300,7 @@ def test_model_service_grpc_asyncio_transport_channel():
 
     # Check that channel is used if provided.
     transport = transports.ModelServiceGrpcAsyncIOTransport(
-        host="squid.clam.whelk",
-        channel=channel,
+        host="squid.clam.whelk", channel=channel,
     )
     assert transport.grpc_channel == channel
    assert transport._host == "squid.clam.whelk:443"
@@ -3755,16 +3389,12 @@ def test_model_service_transport_channel_mtls_with_adc(transport_class):
 
 def test_model_service_grpc_lro_client():
     client = ModelServiceClient(
-        credentials=credentials.AnonymousCredentials(),
-        transport="grpc",
+        credentials=credentials.AnonymousCredentials(), transport="grpc",
     )
     transport = client.transport
 
     # Ensure that we have a api-core operations client.
-    assert isinstance(
-        transport.operations_client,
-        operations_v1.OperationsClient,
-    )
+    assert isinstance(transport.operations_client, operations_v1.OperationsClient,)
 
     # Ensure that subsequent calls to the property send the exact same object.
     assert transport.operations_client is transport.operations_client
@@ -3772,16 +3402,12 @@ def test_model_service_grpc_lro_async_client():
     client = ModelServiceAsyncClient(
-        credentials=credentials.AnonymousCredentials(),
-        transport="grpc_asyncio",
+        credentials=credentials.AnonymousCredentials(), transport="grpc_asyncio",
     )
     transport = client.transport
 
     # Ensure that we have a api-core operations client.
-    assert isinstance(
-        transport.operations_client,
-        operations_v1.OperationsAsyncClient,
-    )
+    assert isinstance(transport.operations_client, operations_v1.OperationsAsyncClient,)
 
     # Ensure that subsequent calls to the property send the exact same object.
     assert transport.operations_client is transport.operations_client
 
 
@@ -3793,9 +3419,7 @@ def test_endpoint_path():
     endpoint = "whelk"
 
     expected = "projects/{project}/locations/{location}/endpoints/{endpoint}".format(
-        project=project,
-        location=location,
-        endpoint=endpoint,
+        project=project, location=location, endpoint=endpoint,
     )
     actual = ModelServiceClient.endpoint_path(project, location, endpoint)
     assert expected == actual
@@ -3820,9 +3444,7 @@ def test_model_path():
     model = "winkle"
 
     expected = "projects/{project}/locations/{location}/models/{model}".format(
-        project=project,
-        location=location,
-        model=model,
+        project=project, location=location, model=model,
     )
     actual = ModelServiceClient.model_path(project, location, model)
     assert expected == actual
@@ -3848,10 +3470,7 @@ def test_model_evaluation_path():
     evaluation = "octopus"
 
     expected = "projects/{project}/locations/{location}/models/{model}/evaluations/{evaluation}".format(
-        project=project,
-        location=location,
-        model=model,
-        evaluation=evaluation,
+        project=project, location=location, model=model, evaluation=evaluation,
     )
     actual = ModelServiceClient.model_evaluation_path(
         project, location, model, evaluation
@@ -3914,9 +3533,7 @@ def test_training_pipeline_path():
     training_pipeline = "winkle"
 
     expected = "projects/{project}/locations/{location}/trainingPipelines/{training_pipeline}".format(
-        project=project,
-        location=location,
-        training_pipeline=training_pipeline,
+        project=project, location=location, training_pipeline=training_pipeline,
     )
     actual = ModelServiceClient.training_pipeline_path(
         project, location, training_pipeline
@@ -3961,9 +3578,7 @@ def test_parse_common_billing_account_path():
 
 def test_common_folder_path():
     folder = "whelk"
 
-    expected = "folders/{folder}".format(
-        folder=folder,
-    )
+    expected = "folders/{folder}".format(folder=folder,)
     actual = ModelServiceClient.common_folder_path(folder)
     assert expected == actual
@@ -3982,9 +3597,7 @@ def test_parse_common_folder_path():
 
 def test_common_organization_path():
     organization = "oyster"
 
-    expected = "organizations/{organization}".format(
-        organization=organization,
-    )
+    expected = "organizations/{organization}".format(organization=organization,)
     actual = ModelServiceClient.common_organization_path(organization)
     assert expected == actual
@@ -4003,9 +3616,7 @@ def test_parse_common_organization_path():
 
 def test_common_project_path():
     project = "cuttlefish"
 
-    expected = "projects/{project}".format(
-        project=project,
-    )
+    expected = "projects/{project}".format(project=project,)
     actual = ModelServiceClient.common_project_path(project)
     assert expected == actual
@@ -4026,8 +3637,7 @@ def test_common_location_path():
     location = "nautilus"
 
     expected = "projects/{project}/locations/{location}".format(
-        project=project,
-        location=location,
+        project=project, location=location,
     )
     actual = ModelServiceClient.common_location_path(project, location)
     assert expected == actual
@@ -4052,8 +3662,7 @@ def test_client_withDEFAULT_CLIENT_INFO():
         transports.ModelServiceTransport, "_prep_wrapped_messages"
     ) as prep:
         client = ModelServiceClient(
-            credentials=credentials.AnonymousCredentials(),
-            client_info=client_info,
+            credentials=credentials.AnonymousCredentials(), client_info=client_info,
         )
         prep.assert_called_once_with(client_info)
 
@@ -4062,7 +3671,6 @@ def test_client_withDEFAULT_CLIENT_INFO():
     ) as prep:
         transport_class = ModelServiceClient.get_transport_class()
         transport = transport_class(
-            credentials=credentials.AnonymousCredentials(),
-            client_info=client_info,
+            credentials=credentials.AnonymousCredentials(), client_info=client_info,
         )
         prep.assert_called_once_with(client_info)
diff --git a/tests/unit/gapic/aiplatform_v1beta1/test_pipeline_service.py b/tests/unit/gapic/aiplatform_v1beta1/test_pipeline_service.py
index 97e4132173..ada82b91c0 100644
--- a/tests/unit/gapic/aiplatform_v1beta1/test_pipeline_service.py
+++ b/tests/unit/gapic/aiplatform_v1beta1/test_pipeline_service.py
@@ -408,9 +408,7 @@ def test_pipeline_service_client_client_options_scopes(
     client_class, transport_class, transport_name
 ):
     # Check the case scopes are provided.
-    options = client_options.ClientOptions(
-        scopes=["1", "2"],
-    )
+    options = client_options.ClientOptions(scopes=["1", "2"],)
     with mock.patch.object(transport_class, "__init__") as patched:
         patched.return_value = None
         client = client_class(client_options=options)
@@ -478,8 +476,7 @@ def test_create_training_pipeline(
     transport: str = "grpc", request_type=pipeline_service.CreateTrainingPipelineRequest
 ):
     client = PipelineServiceClient(
-        credentials=credentials.AnonymousCredentials(),
-        transport=transport,
+        credentials=credentials.AnonymousCredentials(), transport=transport,
     )
 
     # Everything is optional in proto3 as far as the runtime is concerned,
@@ -529,8 +526,7 @@ async def test_create_training_pipeline_async(
     request_type=pipeline_service.CreateTrainingPipelineRequest,
 ):
     client = PipelineServiceAsyncClient(
-        credentials=credentials.AnonymousCredentials(),
-        transport=transport,
+        credentials=credentials.AnonymousCredentials(), transport=transport,
     )
 
     # Everything is optional in proto3 as far as the runtime is concerned,
@@ -577,9 +573,7 @@ async def test_create_training_pipeline_async_from_dict():
 
 
 def test_create_training_pipeline_field_headers():
-    client = PipelineServiceClient(
-        credentials=credentials.AnonymousCredentials(),
-    )
+    client = PipelineServiceClient(credentials=credentials.AnonymousCredentials(),)
 
     # Any value that is part of the HTTP/1.1 URI should be sent as
     # a field header. Set these to a non-empty value.
@@ -601,17 +595,12 @@ def test_create_training_pipeline_field_headers():
 
     # Establish that the field header was sent.
     _, _, kw = call.mock_calls[0]
-    assert (
-        "x-goog-request-params",
-        "parent=parent/value",
-    ) in kw["metadata"]
+    assert ("x-goog-request-params", "parent=parent/value",) in kw["metadata"]
 
 
 @pytest.mark.asyncio
 async def test_create_training_pipeline_field_headers_async():
-    client = PipelineServiceAsyncClient(
-        credentials=credentials.AnonymousCredentials(),
-    )
+    client = PipelineServiceAsyncClient(credentials=credentials.AnonymousCredentials(),)
 
     # Any value that is part of the HTTP/1.1 URI should be sent as
     # a field header. Set these to a non-empty value.
@@ -635,16 +624,11 @@ async def test_create_training_pipeline_field_headers_async():
 
     # Establish that the field header was sent.
     _, _, kw = call.mock_calls[0]
-    assert (
-        "x-goog-request-params",
-        "parent=parent/value",
-    ) in kw["metadata"]
+    assert ("x-goog-request-params", "parent=parent/value",) in kw["metadata"]
 
 
 def test_create_training_pipeline_flattened():
-    client = PipelineServiceClient(
-        credentials=credentials.AnonymousCredentials(),
-    )
+    client = PipelineServiceClient(credentials=credentials.AnonymousCredentials(),)
 
     # Mock the actual call within the gRPC stub, and fake the request.
     with mock.patch.object(
@@ -673,9 +657,7 @@ def test_create_training_pipeline_flattened():
 
 
 def test_create_training_pipeline_flattened_error():
-    client = PipelineServiceClient(
-        credentials=credentials.AnonymousCredentials(),
-    )
+    client = PipelineServiceClient(credentials=credentials.AnonymousCredentials(),)
 
     # Attempting to call a method with both a request object and flattened
     # fields is an error.
@@ -689,9 +671,7 @@ def test_create_training_pipeline_flattened_error():
 
 @pytest.mark.asyncio
 async def test_create_training_pipeline_flattened_async():
-    client = PipelineServiceAsyncClient(
-        credentials=credentials.AnonymousCredentials(),
-    )
+    client = PipelineServiceAsyncClient(credentials=credentials.AnonymousCredentials(),)
 
     # Mock the actual call within the gRPC stub, and fake the request.
     with mock.patch.object(
@@ -724,9 +704,7 @@ async def test_create_training_pipeline_flattened_async():
 
 @pytest.mark.asyncio
 async def test_create_training_pipeline_flattened_error_async():
-    client = PipelineServiceAsyncClient(
-        credentials=credentials.AnonymousCredentials(),
-    )
+    client = PipelineServiceAsyncClient(credentials=credentials.AnonymousCredentials(),)
 
     # Attempting to call a method with both a request object and flattened
     # fields is an error.
@@ -742,8 +720,7 @@ def test_get_training_pipeline(
     transport: str = "grpc", request_type=pipeline_service.GetTrainingPipelineRequest
 ):
     client = PipelineServiceClient(
-        credentials=credentials.AnonymousCredentials(),
-        transport=transport,
+        credentials=credentials.AnonymousCredentials(), transport=transport,
     )
 
     # Everything is optional in proto3 as far as the runtime is concerned,
@@ -793,8 +770,7 @@ async def test_get_training_pipeline_async(
     request_type=pipeline_service.GetTrainingPipelineRequest,
 ):
     client = PipelineServiceAsyncClient(
-        credentials=credentials.AnonymousCredentials(),
-        transport=transport,
+        credentials=credentials.AnonymousCredentials(), transport=transport,
     )
 
     # Everything is optional in proto3 as far as the runtime is concerned,
@@ -841,9 +817,7 @@ async def test_get_training_pipeline_async_from_dict():
 
 
 def test_get_training_pipeline_field_headers():
-    client = PipelineServiceClient(
-        credentials=credentials.AnonymousCredentials(),
-    )
+    client = PipelineServiceClient(credentials=credentials.AnonymousCredentials(),)
 
     # Any value that is part of the HTTP/1.1 URI should be sent as
     # a field header. Set these to a non-empty value.
@@ -865,17 +839,12 @@ def test_get_training_pipeline_field_headers():
 
     # Establish that the field header was sent.
     _, _, kw = call.mock_calls[0]
-    assert (
-        "x-goog-request-params",
-        "name=name/value",
-    ) in kw["metadata"]
+    assert ("x-goog-request-params", "name=name/value",) in kw["metadata"]
 
 
 @pytest.mark.asyncio
 async def test_get_training_pipeline_field_headers_async():
-    client = PipelineServiceAsyncClient(
-        credentials=credentials.AnonymousCredentials(),
-    )
+    client = PipelineServiceAsyncClient(credentials=credentials.AnonymousCredentials(),)
 
     # Any value that is part of the HTTP/1.1 URI should be sent as
     # a field header. Set these to a non-empty value.
@@ -899,16 +868,11 @@ async def test_get_training_pipeline_field_headers_async():
 
     # Establish that the field header was sent.
     _, _, kw = call.mock_calls[0]
-    assert (
-        "x-goog-request-params",
-        "name=name/value",
-    ) in kw["metadata"]
+    assert ("x-goog-request-params", "name=name/value",) in kw["metadata"]
 
 
 def test_get_training_pipeline_flattened():
-    client = PipelineServiceClient(
-        credentials=credentials.AnonymousCredentials(),
-    )
+    client = PipelineServiceClient(credentials=credentials.AnonymousCredentials(),)
 
     # Mock the actual call within the gRPC stub, and fake the request.
     with mock.patch.object(
@@ -919,9 +883,7 @@ def test_get_training_pipeline_flattened():
 
         # Call the method with a truthy value for each flattened field,
         # using the keyword arguments to the method.
-        client.get_training_pipeline(
-            name="name_value",
-        )
+        client.get_training_pipeline(name="name_value",)
 
         # Establish that the underlying call was made with the expected
         # request object values.
@@ -932,24 +894,19 @@ def test_get_training_pipeline_flattened():
 
 
 def test_get_training_pipeline_flattened_error():
-    client = PipelineServiceClient(
-        credentials=credentials.AnonymousCredentials(),
-    )
+    client = PipelineServiceClient(credentials=credentials.AnonymousCredentials(),)
 
     # Attempting to call a method with both a request object and flattened
     # fields is an error.
     with pytest.raises(ValueError):
         client.get_training_pipeline(
-            pipeline_service.GetTrainingPipelineRequest(),
-            name="name_value",
+            pipeline_service.GetTrainingPipelineRequest(), name="name_value",
         )
 
 
 @pytest.mark.asyncio
 async def test_get_training_pipeline_flattened_async():
-    client = PipelineServiceAsyncClient(
-        credentials=credentials.AnonymousCredentials(),
-    )
+    client = PipelineServiceAsyncClient(credentials=credentials.AnonymousCredentials(),)
 
     # Mock the actual call within the gRPC stub, and fake the request.
     with mock.patch.object(
@@ -963,9 +920,7 @@ async def test_get_training_pipeline_flattened_async():
         )
         # Call the method with a truthy value for each flattened field,
         # using the keyword arguments to the method.
-        response = await client.get_training_pipeline(
-            name="name_value",
-        )
+        response = await client.get_training_pipeline(name="name_value",)
 
         # Establish that the underlying call was made with the expected
         # request object values.
@@ -977,16 +932,13 @@ async def test_get_training_pipeline_flattened_async():
 
 @pytest.mark.asyncio
 async def test_get_training_pipeline_flattened_error_async():
-    client = PipelineServiceAsyncClient(
-        credentials=credentials.AnonymousCredentials(),
-    )
+    client = PipelineServiceAsyncClient(credentials=credentials.AnonymousCredentials(),)
 
     # Attempting to call a method with both a request object and flattened
     # fields is an error.
     with pytest.raises(ValueError):
         await client.get_training_pipeline(
-            pipeline_service.GetTrainingPipelineRequest(),
-            name="name_value",
+            pipeline_service.GetTrainingPipelineRequest(), name="name_value",
         )
 
 
@@ -994,8 +946,7 @@ def test_list_training_pipelines(
     transport: str = "grpc", request_type=pipeline_service.ListTrainingPipelinesRequest
 ):
     client = PipelineServiceClient(
-        credentials=credentials.AnonymousCredentials(),
-        transport=transport,
+        credentials=credentials.AnonymousCredentials(), transport=transport,
     )
 
     # Everything is optional in proto3 as far as the runtime is concerned,
@@ -1036,8 +987,7 @@ async def test_list_training_pipelines_async(
     request_type=pipeline_service.ListTrainingPipelinesRequest,
 ):
     client = PipelineServiceAsyncClient(
-        credentials=credentials.AnonymousCredentials(),
-        transport=transport,
+        credentials=credentials.AnonymousCredentials(), transport=transport,
     )
 
     # Everything is optional in proto3 as far as the runtime is concerned,
@@ -1075,9 +1025,7 @@ async def test_list_training_pipelines_async_from_dict():
 
 
 def test_list_training_pipelines_field_headers():
-    client = PipelineServiceClient(
-        credentials=credentials.AnonymousCredentials(),
-    )
+    client = PipelineServiceClient(credentials=credentials.AnonymousCredentials(),)
 
     # Any value that is part of the HTTP/1.1 URI should be sent as
     # a field header. Set these to a non-empty value.
@@ -1099,17 +1047,12 @@ def test_list_training_pipelines_field_headers():
 
     # Establish that the field header was sent.
     _, _, kw = call.mock_calls[0]
-    assert (
-        "x-goog-request-params",
-        "parent=parent/value",
-    ) in kw["metadata"]
+    assert ("x-goog-request-params", "parent=parent/value",) in kw["metadata"]
 
 
 @pytest.mark.asyncio
 async def test_list_training_pipelines_field_headers_async():
-    client = PipelineServiceAsyncClient(
-        credentials=credentials.AnonymousCredentials(),
-    )
+    client = PipelineServiceAsyncClient(credentials=credentials.AnonymousCredentials(),)
 
     # Any value that is part of the HTTP/1.1 URI should be sent as
     # a field header. Set these to a non-empty value.
@@ -1133,16 +1076,11 @@ async def test_list_training_pipelines_field_headers_async():
 
     # Establish that the field header was sent.
     _, _, kw = call.mock_calls[0]
-    assert (
-        "x-goog-request-params",
-        "parent=parent/value",
-    ) in kw["metadata"]
+    assert ("x-goog-request-params", "parent=parent/value",) in kw["metadata"]
 
 
 def test_list_training_pipelines_flattened():
-    client = PipelineServiceClient(
-        credentials=credentials.AnonymousCredentials(),
-    )
+    client = PipelineServiceClient(credentials=credentials.AnonymousCredentials(),)
 
     # Mock the actual call within the gRPC stub, and fake the request.
     with mock.patch.object(
@@ -1153,9 +1091,7 @@ def test_list_training_pipelines_flattened():
 
         # Call the method with a truthy value for each flattened field,
         # using the keyword arguments to the method.
-        client.list_training_pipelines(
-            parent="parent_value",
-        )
+        client.list_training_pipelines(parent="parent_value",)
 
         # Establish that the underlying call was made with the expected
         # request object values.
@@ -1166,24 +1102,19 @@ def test_list_training_pipelines_flattened():
 
 
 def test_list_training_pipelines_flattened_error():
-    client = PipelineServiceClient(
-        credentials=credentials.AnonymousCredentials(),
-    )
+    client = PipelineServiceClient(credentials=credentials.AnonymousCredentials(),)
 
     # Attempting to call a method with both a request object and flattened
     # fields is an error.
     with pytest.raises(ValueError):
         client.list_training_pipelines(
-            pipeline_service.ListTrainingPipelinesRequest(),
-            parent="parent_value",
+            pipeline_service.ListTrainingPipelinesRequest(), parent="parent_value",
         )
 
 
 @pytest.mark.asyncio
 async def test_list_training_pipelines_flattened_async():
-    client = PipelineServiceAsyncClient(
-        credentials=credentials.AnonymousCredentials(),
-    )
+    client = PipelineServiceAsyncClient(credentials=credentials.AnonymousCredentials(),)
 
     # Mock the actual call within the gRPC stub, and fake the request.
     with mock.patch.object(
@@ -1197,9 +1128,7 @@ async def test_list_training_pipelines_flattened_async():
         )
         # Call the method with a truthy value for each flattened field,
         # using the keyword arguments to the method.
-        response = await client.list_training_pipelines(
-            parent="parent_value",
-        )
+        response = await client.list_training_pipelines(parent="parent_value",)
 
         # Establish that the underlying call was made with the expected
         # request object values.
@@ -1211,23 +1140,18 @@ async def test_list_training_pipelines_flattened_async():
 
 @pytest.mark.asyncio
 async def test_list_training_pipelines_flattened_error_async():
-    client = PipelineServiceAsyncClient(
-        credentials=credentials.AnonymousCredentials(),
-    )
+    client = PipelineServiceAsyncClient(credentials=credentials.AnonymousCredentials(),)
 
     # Attempting to call a method with both a request object and flattened
     # fields is an error.
     with pytest.raises(ValueError):
         await client.list_training_pipelines(
-            pipeline_service.ListTrainingPipelinesRequest(),
-            parent="parent_value",
+            pipeline_service.ListTrainingPipelinesRequest(), parent="parent_value",
         )
 
 
 def test_list_training_pipelines_pager():
-    client = PipelineServiceClient(
-        credentials=credentials.AnonymousCredentials,
-    )
+    client = PipelineServiceClient(credentials=credentials.AnonymousCredentials,)
 
     # Mock the actual call within the gRPC stub, and fake the request.
     with mock.patch.object(
@@ -1244,13 +1168,10 @@ def test_list_training_pipelines_pager():
                 next_page_token="abc",
             ),
             pipeline_service.ListTrainingPipelinesResponse(
-                training_pipelines=[],
-                next_page_token="def",
+                training_pipelines=[], next_page_token="def",
             ),
             pipeline_service.ListTrainingPipelinesResponse(
-                training_pipelines=[
-                    training_pipeline.TrainingPipeline(),
-                ],
+                training_pipelines=[training_pipeline.TrainingPipeline(),],
                 next_page_token="ghi",
             ),
             pipeline_service.ListTrainingPipelinesResponse(
@@ -1276,9 +1197,7 @@ def test_list_training_pipelines_pager():
 
 
 def test_list_training_pipelines_pages():
-    client = PipelineServiceClient(
-        credentials=credentials.AnonymousCredentials,
-    )
+    client = PipelineServiceClient(credentials=credentials.AnonymousCredentials,)
 
     # Mock the actual call within the gRPC stub, and fake the request.
     with mock.patch.object(
@@ -1295,13 +1214,10 @@ def test_list_training_pipelines_pages():
                 next_page_token="abc",
             ),
             pipeline_service.ListTrainingPipelinesResponse(
-                training_pipelines=[],
-                next_page_token="def",
+                training_pipelines=[], next_page_token="def",
             ),
             pipeline_service.ListTrainingPipelinesResponse(
-                training_pipelines=[
-                    training_pipeline.TrainingPipeline(),
-                ],
+                training_pipelines=[training_pipeline.TrainingPipeline(),],
                 next_page_token="ghi",
             ),
             pipeline_service.ListTrainingPipelinesResponse(
@@ -1319,9 +1235,7 @@ def test_list_training_pipelines_pages():
 
 @pytest.mark.asyncio
 async def test_list_training_pipelines_async_pager():
-    client = PipelineServiceAsyncClient(
-        credentials=credentials.AnonymousCredentials,
-    )
+    client = PipelineServiceAsyncClient(credentials=credentials.AnonymousCredentials,)
 
     # Mock the actual call within the gRPC stub, and fake the request.
     with mock.patch.object(
@@ -1340,13 +1254,10 @@ async def test_list_training_pipelines_async_pager():
                 next_page_token="abc",
             ),
             pipeline_service.ListTrainingPipelinesResponse(
-                training_pipelines=[],
-                next_page_token="def",
+                training_pipelines=[], next_page_token="def",
             ),
             pipeline_service.ListTrainingPipelinesResponse(
-                training_pipelines=[
-                    training_pipeline.TrainingPipeline(),
-                ],
+                training_pipelines=[training_pipeline.TrainingPipeline(),],
                 next_page_token="ghi",
             ),
             pipeline_service.ListTrainingPipelinesResponse(
@@ -1357,9 +1268,7 @@
             ),
             RuntimeError,
         )
-        async_pager = await client.list_training_pipelines(
-            request={},
-        )
+        async_pager = await client.list_training_pipelines(request={},)
         assert async_pager.next_page_token == "abc"
         responses = []
         async for response in async_pager:
@@ -1371,9 +1280,7 @@
 
 @pytest.mark.asyncio
 async def test_list_training_pipelines_async_pages():
-    client = PipelineServiceAsyncClient(
-        credentials=credentials.AnonymousCredentials,
-    )
+    client = PipelineServiceAsyncClient(credentials=credentials.AnonymousCredentials,)
 
     # Mock the actual call within the gRPC stub, and fake the request.
     with mock.patch.object(
@@ -1392,13 +1299,10 @@ async def test_list_training_pipelines_async_pages():
                 next_page_token="abc",
             ),
             pipeline_service.ListTrainingPipelinesResponse(
-                training_pipelines=[],
-                next_page_token="def",
+                training_pipelines=[], next_page_token="def",
             ),
             pipeline_service.ListTrainingPipelinesResponse(
-                training_pipelines=[
-                    training_pipeline.TrainingPipeline(),
-                ],
+                training_pipelines=[training_pipeline.TrainingPipeline(),],
                 next_page_token="ghi",
             ),
             pipeline_service.ListTrainingPipelinesResponse(
@@ -1420,8 +1324,7 @@ def test_delete_training_pipeline(
     transport: str = "grpc", request_type=pipeline_service.DeleteTrainingPipelineRequest
 ):
     client = PipelineServiceClient(
-        credentials=credentials.AnonymousCredentials(),
-        transport=transport,
+        credentials=credentials.AnonymousCredentials(), transport=transport,
     )
 
     # Everything is optional in proto3 as far as the runtime is concerned,
@@ -1457,8 +1360,7 @@ async def test_delete_training_pipeline_async(
     request_type=pipeline_service.DeleteTrainingPipelineRequest,
 ):
     client = PipelineServiceAsyncClient(
-        credentials=credentials.AnonymousCredentials(),
-        transport=transport,
+        credentials=credentials.AnonymousCredentials(), transport=transport,
     )
 
     # Everything is optional in proto3 as far as the runtime is concerned,
@@ -1492,9 +1394,7 @@ async def test_delete_training_pipeline_async_from_dict():
 
 
 def test_delete_training_pipeline_field_headers():
-    client = PipelineServiceClient(
-        credentials=credentials.AnonymousCredentials(),
-    )
+    client = PipelineServiceClient(credentials=credentials.AnonymousCredentials(),)
 
     # Any value that is part of the HTTP/1.1 URI should be sent as
     # a field header. Set these to a non-empty value.
@@ -1516,17 +1416,12 @@ def test_delete_training_pipeline_field_headers():
 
     # Establish that the field header was sent.
     _, _, kw = call.mock_calls[0]
-    assert (
-        "x-goog-request-params",
-        "name=name/value",
-    ) in kw["metadata"]
+    assert ("x-goog-request-params", "name=name/value",) in kw["metadata"]
 
 
 @pytest.mark.asyncio
 async def test_delete_training_pipeline_field_headers_async():
-    client = PipelineServiceAsyncClient(
-        credentials=credentials.AnonymousCredentials(),
-    )
+    client = PipelineServiceAsyncClient(credentials=credentials.AnonymousCredentials(),)
 
     # Any value that is part of the HTTP/1.1 URI should be sent as
     # a field header. Set these to a non-empty value.
@@ -1550,16 +1445,11 @@ async def test_delete_training_pipeline_field_headers_async():
 
     # Establish that the field header was sent.
     _, _, kw = call.mock_calls[0]
-    assert (
-        "x-goog-request-params",
-        "name=name/value",
-    ) in kw["metadata"]
+    assert ("x-goog-request-params", "name=name/value",) in kw["metadata"]
 
 
 def test_delete_training_pipeline_flattened():
-    client = PipelineServiceClient(
-        credentials=credentials.AnonymousCredentials(),
-    )
+    client = PipelineServiceClient(credentials=credentials.AnonymousCredentials(),)
 
     # Mock the actual call within the gRPC stub, and fake the request.
     with mock.patch.object(
@@ -1570,9 +1460,7 @@ def test_delete_training_pipeline_flattened():
 
         # Call the method with a truthy value for each flattened field,
         # using the keyword arguments to the method.
-        client.delete_training_pipeline(
-            name="name_value",
-        )
+        client.delete_training_pipeline(name="name_value",)
 
         # Establish that the underlying call was made with the expected
         # request object values.
@@ -1583,24 +1471,19 @@ def test_delete_training_pipeline_flattened():
 
 
 def test_delete_training_pipeline_flattened_error():
-    client = PipelineServiceClient(
-        credentials=credentials.AnonymousCredentials(),
-    )
+    client = PipelineServiceClient(credentials=credentials.AnonymousCredentials(),)
 
     # Attempting to call a method with both a request object and flattened
     # fields is an error.
     with pytest.raises(ValueError):
         client.delete_training_pipeline(
-            pipeline_service.DeleteTrainingPipelineRequest(),
-            name="name_value",
+            pipeline_service.DeleteTrainingPipelineRequest(), name="name_value",
         )
 
 
 @pytest.mark.asyncio
 async def test_delete_training_pipeline_flattened_async():
-    client = PipelineServiceAsyncClient(
-        credentials=credentials.AnonymousCredentials(),
-    )
+    client = PipelineServiceAsyncClient(credentials=credentials.AnonymousCredentials(),)
 
     # Mock the actual call within the gRPC stub, and fake the request.
     with mock.patch.object(
@@ -1614,9 +1497,7 @@ async def test_delete_training_pipeline_flattened_async():
         )
         # Call the method with a truthy value for each flattened field,
         # using the keyword arguments to the method.
-        response = await client.delete_training_pipeline(
-            name="name_value",
-        )
+        response = await client.delete_training_pipeline(name="name_value",)
 
         # Establish that the underlying call was made with the expected
         # request object values.
@@ -1628,16 +1509,13 @@ async def test_delete_training_pipeline_flattened_async():
 
 @pytest.mark.asyncio
 async def test_delete_training_pipeline_flattened_error_async():
-    client = PipelineServiceAsyncClient(
-        credentials=credentials.AnonymousCredentials(),
-    )
+    client = PipelineServiceAsyncClient(credentials=credentials.AnonymousCredentials(),)
 
     # Attempting to call a method with both a request object and flattened
     # fields is an error.
     with pytest.raises(ValueError):
         await client.delete_training_pipeline(
-            pipeline_service.DeleteTrainingPipelineRequest(),
-            name="name_value",
+            pipeline_service.DeleteTrainingPipelineRequest(), name="name_value",
         )
 
 
@@ -1645,8 +1523,7 @@ def test_cancel_training_pipeline(
     transport: str = "grpc", request_type=pipeline_service.CancelTrainingPipelineRequest
 ):
     client = PipelineServiceClient(
-        credentials=credentials.AnonymousCredentials(),
-        transport=transport,
+        credentials=credentials.AnonymousCredentials(), transport=transport,
     )
 
     # Everything is optional in proto3 as far as the runtime is concerned,
@@ -1682,8 +1559,7 @@ async def test_cancel_training_pipeline_async(
     request_type=pipeline_service.CancelTrainingPipelineRequest,
 ):
     client = PipelineServiceAsyncClient(
-        credentials=credentials.AnonymousCredentials(),
-        transport=transport,
+        credentials=credentials.AnonymousCredentials(), transport=transport,
     )
 
     # Everything is optional in proto3 as far as the runtime is concerned,
@@ -1715,9 +1591,7 @@ async def test_cancel_training_pipeline_async_from_dict():
 
 
 def test_cancel_training_pipeline_field_headers():
-    client = PipelineServiceClient(
-        credentials=credentials.AnonymousCredentials(),
-    )
+    client = PipelineServiceClient(credentials=credentials.AnonymousCredentials(),)
 
     # Any value that is part of the HTTP/1.1 URI should be sent as
     # a field header. Set these to a non-empty value.
@@ -1739,17 +1613,12 @@ def test_cancel_training_pipeline_field_headers():
 
     # Establish that the field header was sent.
     _, _, kw = call.mock_calls[0]
-    assert (
-        "x-goog-request-params",
-        "name=name/value",
-    ) in kw["metadata"]
+    assert ("x-goog-request-params", "name=name/value",) in kw["metadata"]
 
 
 @pytest.mark.asyncio
 async def test_cancel_training_pipeline_field_headers_async():
-    client = PipelineServiceAsyncClient(
-        credentials=credentials.AnonymousCredentials(),
-    )
+    client = PipelineServiceAsyncClient(credentials=credentials.AnonymousCredentials(),)
 
     # Any value that is part of the HTTP/1.1 URI should be sent as
     # a field header. Set these to a non-empty value.
@@ -1771,16 +1640,11 @@ async def test_cancel_training_pipeline_field_headers_async():
 
     # Establish that the field header was sent.
     _, _, kw = call.mock_calls[0]
-    assert (
-        "x-goog-request-params",
-        "name=name/value",
-    ) in kw["metadata"]
+    assert ("x-goog-request-params", "name=name/value",) in kw["metadata"]
 
 
 def test_cancel_training_pipeline_flattened():
-    client = PipelineServiceClient(
-        credentials=credentials.AnonymousCredentials(),
-    )
+    client = PipelineServiceClient(credentials=credentials.AnonymousCredentials(),)
 
     # Mock the actual call within the gRPC stub, and fake the request.
     with mock.patch.object(
@@ -1791,9 +1655,7 @@ def test_cancel_training_pipeline_flattened():
 
         # Call the method with a truthy value for each flattened field,
         # using the keyword arguments to the method.
-        client.cancel_training_pipeline(
-            name="name_value",
-        )
+        client.cancel_training_pipeline(name="name_value",)
 
         # Establish that the underlying call was made with the expected
         # request object values.
@@ -1804,24 +1666,19 @@ def test_cancel_training_pipeline_flattened():
 
 
 def test_cancel_training_pipeline_flattened_error():
-    client = PipelineServiceClient(
-        credentials=credentials.AnonymousCredentials(),
-    )
+    client = PipelineServiceClient(credentials=credentials.AnonymousCredentials(),)
 
     # Attempting to call a method with both a request object and flattened
     # fields is an error.
     with pytest.raises(ValueError):
         client.cancel_training_pipeline(
-            pipeline_service.CancelTrainingPipelineRequest(),
-            name="name_value",
+            pipeline_service.CancelTrainingPipelineRequest(), name="name_value",
         )
 
 
 @pytest.mark.asyncio
 async def test_cancel_training_pipeline_flattened_async():
-    client = PipelineServiceAsyncClient(
-        credentials=credentials.AnonymousCredentials(),
-    )
+    client = PipelineServiceAsyncClient(credentials=credentials.AnonymousCredentials(),)
 
     # Mock the actual call within the gRPC stub, and fake the request.
     with mock.patch.object(
@@ -1833,9 +1690,7 @@ async def test_cancel_training_pipeline_flattened_async():
         call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None)
         # Call the method with a truthy value for each flattened field,
         # using the keyword arguments to the method.
-        response = await client.cancel_training_pipeline(
-            name="name_value",
-        )
+        response = await client.cancel_training_pipeline(name="name_value",)
 
         # Establish that the underlying call was made with the expected
         # request object values.
@@ -1847,16 +1702,13 @@ async def test_cancel_training_pipeline_flattened_async():
 
 @pytest.mark.asyncio
 async def test_cancel_training_pipeline_flattened_error_async():
-    client = PipelineServiceAsyncClient(
-        credentials=credentials.AnonymousCredentials(),
-    )
+    client = PipelineServiceAsyncClient(credentials=credentials.AnonymousCredentials(),)
 
     # Attempting to call a method with both a request object and flattened
     # fields is an error.
     with pytest.raises(ValueError):
         await client.cancel_training_pipeline(
-            pipeline_service.CancelTrainingPipelineRequest(),
-            name="name_value",
+            pipeline_service.CancelTrainingPipelineRequest(), name="name_value",
         )
 
 
@@ -1867,8 +1719,7 @@ def test_credentials_transport_error():
     )
     with pytest.raises(ValueError):
         client = PipelineServiceClient(
-            credentials=credentials.AnonymousCredentials(),
-            transport=transport,
+            credentials=credentials.AnonymousCredentials(), transport=transport,
         )
 
     # It is an error to provide a credentials file and a transport instance.
@@ -1887,8 +1738,7 @@ def test_credentials_transport_error():
     )
     with pytest.raises(ValueError):
         client = PipelineServiceClient(
-            client_options={"scopes": ["1", "2"]},
-            transport=transport,
+            client_options={"scopes": ["1", "2"]}, transport=transport,
         )
 
 
@@ -1933,13 +1783,8 @@ def test_transport_adc(transport_class):
 
 def test_transport_grpc_default():
     # A client should use the gRPC transport by default.
-    client = PipelineServiceClient(
-        credentials=credentials.AnonymousCredentials(),
-    )
-    assert isinstance(
-        client.transport,
-        transports.PipelineServiceGrpcTransport,
-    )
+    client = PipelineServiceClient(credentials=credentials.AnonymousCredentials(),)
+    assert isinstance(client.transport, transports.PipelineServiceGrpcTransport,)
 
 
 def test_pipeline_service_base_transport_error():
@@ -1990,8 +1835,7 @@ def test_pipeline_service_base_transport_with_credentials_file():
         Transport.return_value = None
         load_creds.return_value = (credentials.AnonymousCredentials(), None)
         transport = transports.PipelineServiceTransport(
-            credentials_file="credentials.json",
-            quota_project_id="octopus",
+            credentials_file="credentials.json", quota_project_id="octopus",
        )
         load_creds.assert_called_once_with(
             "credentials.json",
@@ -2061,8 +1905,7 @@ def test_pipeline_service_grpc_transport_channel():
 
     # Check that channel is used if provided.
     transport = transports.PipelineServiceGrpcTransport(
-        host="squid.clam.whelk",
-        channel=channel,
+        host="squid.clam.whelk", channel=channel,
     )
     assert transport.grpc_channel == channel
     assert transport._host == "squid.clam.whelk:443"
@@ -2074,8 +1917,7 @@ def test_pipeline_service_grpc_asyncio_transport_channel():
 
     # Check that channel is used if provided.
     transport = transports.PipelineServiceGrpcAsyncIOTransport(
-        host="squid.clam.whelk",
-        channel=channel,
+        host="squid.clam.whelk", channel=channel,
     )
     assert transport.grpc_channel == channel
     assert transport._host == "squid.clam.whelk:443"
@@ -2172,16 +2014,12 @@ def test_pipeline_service_transport_channel_mtls_with_adc(transport_class):
 
 def test_pipeline_service_grpc_lro_client():
     client = PipelineServiceClient(
-        credentials=credentials.AnonymousCredentials(),
-        transport="grpc",
+        credentials=credentials.AnonymousCredentials(), transport="grpc",
     )
     transport = client.transport
 
     # Ensure that we have a api-core operations client.
-    assert isinstance(
-        transport.operations_client,
-        operations_v1.OperationsClient,
-    )
+    assert isinstance(transport.operations_client, operations_v1.OperationsClient,)
 
     # Ensure that subsequent calls to the property send the exact same object.
     assert transport.operations_client is transport.operations_client
@@ -2189,16 +2027,12 @@ def test_pipeline_service_grpc_lro_client():
 
 def test_pipeline_service_grpc_lro_async_client():
     client = PipelineServiceAsyncClient(
-        credentials=credentials.AnonymousCredentials(),
-        transport="grpc_asyncio",
+        credentials=credentials.AnonymousCredentials(), transport="grpc_asyncio",
     )
     transport = client.transport
 
     # Ensure that we have a api-core operations client.
-    assert isinstance(
-        transport.operations_client,
-        operations_v1.OperationsAsyncClient,
-    )
+    assert isinstance(transport.operations_client, operations_v1.OperationsAsyncClient,)
 
     # Ensure that subsequent calls to the property send the exact same object.
     assert transport.operations_client is transport.operations_client
 
 
@@ -2210,9 +2044,7 @@ def test_endpoint_path():
     endpoint = "whelk"
 
     expected = "projects/{project}/locations/{location}/endpoints/{endpoint}".format(
-        project=project,
-        location=location,
-        endpoint=endpoint,
+        project=project, location=location, endpoint=endpoint,
     )
     actual = PipelineServiceClient.endpoint_path(project, location, endpoint)
     assert expected == actual
@@ -2237,9 +2069,7 @@ def test_model_path():
     model = "winkle"
 
     expected = "projects/{project}/locations/{location}/models/{model}".format(
-        project=project,
-        location=location,
-        model=model,
+        project=project, location=location, model=model,
     )
     actual = PipelineServiceClient.model_path(project, location, model)
     assert expected == actual
@@ -2264,9 +2094,7 @@ def test_training_pipeline_path():
     training_pipeline = "whelk"
 
     expected = "projects/{project}/locations/{location}/trainingPipelines/{training_pipeline}".format(
-        project=project,
-        location=location,
-        training_pipeline=training_pipeline,
+        project=project, location=location, training_pipeline=training_pipeline,
     )
     actual = PipelineServiceClient.training_pipeline_path(
         project, location, training_pipeline
@@ -2311,9 +2139,7 @@ def test_parse_common_billing_account_path():
 
 def test_common_folder_path():
     folder = "winkle"
 
-    expected = "folders/{folder}".format(
-        folder=folder,
-    )
+    expected = "folders/{folder}".format(folder=folder,)
     actual = PipelineServiceClient.common_folder_path(folder)
     assert expected == actual
@@ -2332,9 +2158,7 @@ def test_parse_common_folder_path():
 
 def test_common_organization_path():
     organization = "scallop"
 
-    expected = "organizations/{organization}".format(
-        organization=organization,
-    )
+    expected = "organizations/{organization}".format(organization=organization,)
     actual = PipelineServiceClient.common_organization_path(organization)
     assert expected == actual
@@ -2353,9 +2177,7 @@ def test_parse_common_organization_path():
 
 def test_common_project_path():
     project = "squid"
 
-    expected = "projects/{project}".format(
-        project=project,
-    )
+    expected = "projects/{project}".format(project=project,)
     actual = PipelineServiceClient.common_project_path(project)
     assert expected == actual
@@ -2376,8 +2198,7 @@ def test_common_location_path():
     location = "octopus"
 
     expected = "projects/{project}/locations/{location}".format(
-        project=project,
-        location=location,
+        project=project, location=location,
     )
     actual = PipelineServiceClient.common_location_path(project, location)
     assert expected == actual
@@ -2402,8 +2223,7 @@ def test_client_withDEFAULT_CLIENT_INFO():
         transports.PipelineServiceTransport, "_prep_wrapped_messages"
     ) as prep:
         client = PipelineServiceClient(
-            credentials=credentials.AnonymousCredentials(),
-            client_info=client_info,
+            credentials=credentials.AnonymousCredentials(), client_info=client_info,
         )
         prep.assert_called_once_with(client_info)
 
@@ -2412,7 +2232,6 @@ def test_client_withDEFAULT_CLIENT_INFO():
     ) as prep:
         transport_class = PipelineServiceClient.get_transport_class()
         transport = transport_class(
-            credentials=credentials.AnonymousCredentials(),
-            client_info=client_info,
+            credentials=credentials.AnonymousCredentials(), client_info=client_info,
         )
         prep.assert_called_once_with(client_info)
diff --git a/tests/unit/gapic/aiplatform_v1beta1/test_prediction_service.py b/tests/unit/gapic/aiplatform_v1beta1/test_prediction_service.py
index 6c9f551aa2..e47e0f62c5 100644
--- a/tests/unit/gapic/aiplatform_v1beta1/test_prediction_service.py
+++ b/tests/unit/gapic/aiplatform_v1beta1/test_prediction_service.py
@@ -389,9 +387,7 @@ def test_prediction_service_client_client_options_scopes(
     client_class, transport_class, transport_name
 ):
     # Check the case scopes are provided.
-    options = client_options.ClientOptions(
-        scopes=["1", "2"],
-    )
+    options = client_options.ClientOptions(scopes=["1", "2"],)
     with mock.patch.object(transport_class, "__init__") as patched:
         patched.return_value = None
         client = client_class(client_options=options)
@@ -459,8 +457,7 @@ def test_predict(
     transport: str = "grpc", request_type=prediction_service.PredictRequest
 ):
     client = PredictionServiceClient(
-        credentials=credentials.AnonymousCredentials(),
-        transport=transport,
+        credentials=credentials.AnonymousCredentials(), transport=transport,
     )
 
     # Everything is optional in proto3 as far as the runtime is concerned,
@@ -498,8 +495,7 @@ async def test_predict_async(
     transport: str = "grpc_asyncio", request_type=prediction_service.PredictRequest
 ):
     client = PredictionServiceAsyncClient(
-        credentials=credentials.AnonymousCredentials(),
-        transport=transport,
+        credentials=credentials.AnonymousCredentials(), transport=transport,
     )
 
     # Everything is optional in proto3 as far as the runtime is concerned,
@@ -535,9 +531,7 @@ async def test_predict_async_from_dict():
 
 
 def test_predict_field_headers():
-    client = PredictionServiceClient(
-        credentials=credentials.AnonymousCredentials(),
-    )
+    client = PredictionServiceClient(credentials=credentials.AnonymousCredentials(),)
 
     # Any value that is part of the HTTP/1.1 URI should be sent as
     # a field header. Set these to a non-empty value.
@@ -557,10 +551,7 @@ def test_predict_field_headers():
 
     # Establish that the field header was sent.
     _, _, kw = call.mock_calls[0]
-    assert (
-        "x-goog-request-params",
-        "endpoint=endpoint/value",
-    ) in kw["metadata"]
+    assert ("x-goog-request-params", "endpoint=endpoint/value",) in kw["metadata"]
 
 
 @pytest.mark.asyncio
@@ -589,16 +580,11 @@ async def test_predict_field_headers_async():
 
     # Establish that the field header was sent.
     _, _, kw = call.mock_calls[0]
-    assert (
-        "x-goog-request-params",
-        "endpoint=endpoint/value",
-    ) in kw["metadata"]
+    assert ("x-goog-request-params", "endpoint=endpoint/value",) in kw["metadata"]
 
 
 def test_predict_flattened():
-    client = PredictionServiceClient(
-        credentials=credentials.AnonymousCredentials(),
-    )
+    client = PredictionServiceClient(credentials=credentials.AnonymousCredentials(),)
 
     # Mock the actual call within the gRPC stub, and fake the request.
     with mock.patch.object(type(client.transport.predict), "__call__") as call:
@@ -631,9 +617,7 @@ def test_predict_flattened():
 
 
 def test_predict_flattened_error():
-    client = PredictionServiceClient(
-        credentials=credentials.AnonymousCredentials(),
-    )
+    client = PredictionServiceClient(credentials=credentials.AnonymousCredentials(),)
 
     # Attempting to call a method with both a request object and flattened
     # fields is an error.
@@ -706,8 +690,7 @@ def test_explain(
     transport: str = "grpc", request_type=prediction_service.ExplainRequest
 ):
     client = PredictionServiceClient(
-        credentials=credentials.AnonymousCredentials(),
-        transport=transport,
+        credentials=credentials.AnonymousCredentials(), transport=transport,
     )
 
     # Everything is optional in proto3 as far as the runtime is concerned,
@@ -745,8 +728,7 @@ async def test_explain_async(
     transport: str = "grpc_asyncio", request_type=prediction_service.ExplainRequest
 ):
     client = PredictionServiceAsyncClient(
-        credentials=credentials.AnonymousCredentials(),
-        transport=transport,
+        credentials=credentials.AnonymousCredentials(), transport=transport,
     )
 
     # Everything is optional in proto3 as far as the runtime is concerned,
@@ -782,9 +764,7 @@ async def test_explain_async_from_dict():
 
 
 def test_explain_field_headers():
-    client = PredictionServiceClient(
-        credentials=credentials.AnonymousCredentials(),
-    )
+    client = PredictionServiceClient(credentials=credentials.AnonymousCredentials(),)
 
     # Any value that is part of the HTTP/1.1 URI should be sent as
     # a field header. Set these to a non-empty value.
@@ -804,10 +784,7 @@ def test_explain_field_headers():
 
     # Establish that the field header was sent.
     _, _, kw = call.mock_calls[0]
-    assert (
-        "x-goog-request-params",
-        "endpoint=endpoint/value",
-    ) in kw["metadata"]
+    assert ("x-goog-request-params", "endpoint=endpoint/value",) in kw["metadata"]
 
 
 @pytest.mark.asyncio
@@ -836,16 +813,11 @@ async def test_explain_field_headers_async():
 
     # Establish that the field header was sent.
     _, _, kw = call.mock_calls[0]
-    assert (
-        "x-goog-request-params",
-        "endpoint=endpoint/value",
-    ) in kw["metadata"]
+    assert ("x-goog-request-params", "endpoint=endpoint/value",) in kw["metadata"]
 
 
 def test_explain_flattened():
-    client = PredictionServiceClient(
-        credentials=credentials.AnonymousCredentials(),
-    )
+    client = PredictionServiceClient(credentials=credentials.AnonymousCredentials(),)
 
     # Mock the actual call within the gRPC stub, and fake the request.
     with mock.patch.object(type(client.transport.explain), "__call__") as call:
@@ -881,9 +853,7 @@ def test_explain_flattened():
 
 
 def test_explain_flattened_error():
-    client = PredictionServiceClient(
-        credentials=credentials.AnonymousCredentials(),
-    )
+    client = PredictionServiceClient(credentials=credentials.AnonymousCredentials(),)
 
     # Attempting to call a method with both a request object and flattened
     # fields is an error.
@@ -964,8 +934,7 @@ def test_credentials_transport_error():
     )
     with pytest.raises(ValueError):
         client = PredictionServiceClient(
-            credentials=credentials.AnonymousCredentials(),
-            transport=transport,
+            credentials=credentials.AnonymousCredentials(), transport=transport,
         )
 
     # It is an error to provide a credentials file and a transport instance.
@@ -984,8 +953,7 @@ def test_credentials_transport_error():
     )
     with pytest.raises(ValueError):
         client = PredictionServiceClient(
-            client_options={"scopes": ["1", "2"]},
-            transport=transport,
+            client_options={"scopes": ["1", "2"]}, transport=transport,
         )
 
 
@@ -1030,13 +998,8 @@ def test_transport_adc(transport_class):
 
 def test_transport_grpc_default():
     # A client should use the gRPC transport by default.
-    client = PredictionServiceClient(
-        credentials=credentials.AnonymousCredentials(),
-    )
-    assert isinstance(
-        client.transport,
-        transports.PredictionServiceGrpcTransport,
-    )
+    client = PredictionServiceClient(credentials=credentials.AnonymousCredentials(),)
+    assert isinstance(client.transport, transports.PredictionServiceGrpcTransport,)
 
 
 def test_prediction_service_base_transport_error():
@@ -1079,8 +1042,7 @@ def test_prediction_service_base_transport_with_credentials_file():
         Transport.return_value = None
         load_creds.return_value = (credentials.AnonymousCredentials(), None)
         transport = transports.PredictionServiceTransport(
-            credentials_file="credentials.json",
-            quota_project_id="octopus",
+            credentials_file="credentials.json", quota_project_id="octopus",
         )
         load_creds.assert_called_once_with(
             "credentials.json",
@@ -1150,8 +1112,7 @@ def test_prediction_service_grpc_transport_channel():
 
     # Check that channel is used if provided.
     transport = transports.PredictionServiceGrpcTransport(
-        host="squid.clam.whelk",
-        channel=channel,
+        host="squid.clam.whelk", channel=channel,
     )
     assert transport.grpc_channel == channel
    assert transport._host == "squid.clam.whelk:443"
@@ -1163,8 +1124,7 @@ def test_prediction_service_grpc_asyncio_transport_channel():
 
     # Check that channel is used if provided.
     transport = transports.PredictionServiceGrpcAsyncIOTransport(
-        host="squid.clam.whelk",
-        channel=channel,
+        host="squid.clam.whelk", channel=channel,
     )
     assert transport.grpc_channel == channel
     assert transport._host == "squid.clam.whelk:443"
@@ -1265,9 +1225,7 @@ def test_endpoint_path():
     endpoint = "whelk"
 
     expected = "projects/{project}/locations/{location}/endpoints/{endpoint}".format(
-        project=project,
-        location=location,
-        endpoint=endpoint,
+        project=project, location=location, endpoint=endpoint,
     )
     actual = PredictionServiceClient.endpoint_path(project, location, endpoint)
     assert expected == actual
@@ -1310,9 +1268,7 @@ def test_parse_common_billing_account_path():
 
 def test_common_folder_path():
     folder = "winkle"
-    expected = "folders/{folder}".format(
-        folder=folder,
-    )
+    expected = "folders/{folder}".format(folder=folder,)
     actual = PredictionServiceClient.common_folder_path(folder)
     assert expected == actual
 
@@ -1331,9 +1287,7 @@ def test_parse_common_folder_path():
 
 def test_common_organization_path():
     organization = "scallop"
-    expected = "organizations/{organization}".format(
-        organization=organization,
-    )
+    expected = "organizations/{organization}".format(organization=organization,)
     actual = PredictionServiceClient.common_organization_path(organization)
     assert expected == actual
 
@@ -1352,9 +1306,7 @@ def test_parse_common_organization_path():
 
 def test_common_project_path():
     project = "squid"
-    expected = "projects/{project}".format(
-        project=project,
-    )
+    expected = "projects/{project}".format(project=project,)
     actual = PredictionServiceClient.common_project_path(project)
     assert expected == actual
 
@@ -1375,8 +1327,7 @@ def test_common_location_path():
     location = "octopus"
 
     expected = "projects/{project}/locations/{location}".format(
-        project=project,
-        location=location,
+        project=project, location=location,
     )
     actual = PredictionServiceClient.common_location_path(project, location)
     assert expected == actual
@@ -1401,8 +1352,7 @@ def test_client_withDEFAULT_CLIENT_INFO():
         transports.PredictionServiceTransport, "_prep_wrapped_messages"
     ) as prep:
         client = PredictionServiceClient(
-            credentials=credentials.AnonymousCredentials(),
-            client_info=client_info,
+            credentials=credentials.AnonymousCredentials(), client_info=client_info,
         )
         prep.assert_called_once_with(client_info)
 
@@ -1411,7 +1361,6 @@ def test_client_withDEFAULT_CLIENT_INFO():
     ) as prep:
         transport_class = PredictionServiceClient.get_transport_class()
         transport = transport_class(
-            credentials=credentials.AnonymousCredentials(),
-            client_info=client_info,
+            credentials=credentials.AnonymousCredentials(), client_info=client_info,
        )
         prep.assert_called_once_with(client_info)
diff --git a/tests/unit/gapic/aiplatform_v1beta1/test_specialist_pool_service.py b/tests/unit/gapic/aiplatform_v1beta1/test_specialist_pool_service.py
index e08177ca7e..6c1061d588 100644
--- a/tests/unit/gapic/aiplatform_v1beta1/test_specialist_pool_service.py
+++ b/tests/unit/gapic/aiplatform_v1beta1/test_specialist_pool_service.py
@@ -404,9 +404,7 @@ def test_specialist_pool_service_client_client_options_scopes(
     client_class, transport_class, transport_name
 ):
     # Check the case scopes are provided.
-    options = client_options.ClientOptions(
-        scopes=["1", "2"],
-    )
+    options = client_options.ClientOptions(scopes=["1", "2"],)
     with mock.patch.object(transport_class, "__init__") as patched:
         patched.return_value = None
         client = client_class(client_options=options)
@@ -479,8 +477,7 @@ def test_create_specialist_pool(
     request_type=specialist_pool_service.CreateSpecialistPoolRequest,
 ):
     client = SpecialistPoolServiceClient(
-        credentials=credentials.AnonymousCredentials(),
-        transport=transport,
+        credentials=credentials.AnonymousCredentials(), transport=transport,
     )
 
     # Everything is optional in proto3 as far as the runtime is concerned,
@@ -516,8 +513,7 @@ async def test_create_specialist_pool_async(
     request_type=specialist_pool_service.CreateSpecialistPoolRequest,
 ):
     client = SpecialistPoolServiceAsyncClient(
-        credentials=credentials.AnonymousCredentials(),
-        transport=transport,
+        credentials=credentials.AnonymousCredentials(), transport=transport,
     )
 
     # Everything is optional in proto3 as far as the runtime is concerned,
@@ -575,10 +571,7 @@ def test_create_specialist_pool_field_headers():
 
     # Establish that the field header was sent.
     _, _, kw = call.mock_calls[0]
-    assert (
-        "x-goog-request-params",
-        "parent=parent/value",
-    ) in kw["metadata"]
+    assert ("x-goog-request-params", "parent=parent/value",) in kw["metadata"]
 
 
 @pytest.mark.asyncio
@@ -609,10 +602,7 @@ async def test_create_specialist_pool_field_headers_async():
 
     # Establish that the field header was sent.
     _, _, kw = call.mock_calls[0]
-    assert (
-        "x-goog-request-params",
-        "parent=parent/value",
-    ) in kw["metadata"]
+    assert ("x-goog-request-params", "parent=parent/value",) in kw["metadata"]
 
 
 def test_create_specialist_pool_flattened():
@@ -717,8 +707,7 @@ def test_get_specialist_pool(
     request_type=specialist_pool_service.GetSpecialistPoolRequest,
 ):
     client = SpecialistPoolServiceClient(
-        credentials=credentials.AnonymousCredentials(),
-        transport=transport,
+        credentials=credentials.AnonymousCredentials(), transport=transport,
     )
 
     # Everything is optional in proto3 as far as the runtime is concerned,
@@ -771,8 +760,7 @@ async def test_get_specialist_pool_async(
     request_type=specialist_pool_service.GetSpecialistPoolRequest,
 ):
     client = SpecialistPoolServiceAsyncClient(
-        credentials=credentials.AnonymousCredentials(),
-        transport=transport,
+        credentials=credentials.AnonymousCredentials(), transport=transport,
     )
 
     # Everything is optional in proto3 as far as the runtime is concerned,
@@ -846,10 +834,7 @@ def test_get_specialist_pool_field_headers():
 
     # Establish that the field header was sent.
     _, _, kw = call.mock_calls[0]
-    assert (
-        "x-goog-request-params",
-        "name=name/value",
-    ) in kw["metadata"]
+    assert ("x-goog-request-params", "name=name/value",) in kw["metadata"]
 
 
 @pytest.mark.asyncio
@@ -880,10 +865,7 @@ async def test_get_specialist_pool_field_headers_async():
 
     # Establish that the field header was sent.
     _, _, kw = call.mock_calls[0]
-    assert (
-        "x-goog-request-params",
-        "name=name/value",
-    ) in kw["metadata"]
+    assert ("x-goog-request-params", "name=name/value",) in kw["metadata"]
 
 
 def test_get_specialist_pool_flattened():
@@ -900,9 +882,7 @@ def test_get_specialist_pool_flattened():
 
         # Call the method with a truthy value for each flattened field,
         # using the keyword arguments to the method.
-        client.get_specialist_pool(
-            name="name_value",
-        )
+        client.get_specialist_pool(name="name_value",)
 
         # Establish that the underlying call was made with the expected
         # request object values.
@@ -921,8 +901,7 @@ def test_get_specialist_pool_flattened_error():
     # fields is an error.
     with pytest.raises(ValueError):
         client.get_specialist_pool(
-            specialist_pool_service.GetSpecialistPoolRequest(),
-            name="name_value",
+            specialist_pool_service.GetSpecialistPoolRequest(), name="name_value",
         )
 
 
@@ -944,9 +923,7 @@ async def test_get_specialist_pool_flattened_async():
         )
         # Call the method with a truthy value for each flattened field,
         # using the keyword arguments to the method.
-        response = await client.get_specialist_pool(
-            name="name_value",
-        )
+        response = await client.get_specialist_pool(name="name_value",)
 
         # Establish that the underlying call was made with the expected
         # request object values.
@@ -966,8 +943,7 @@ async def test_get_specialist_pool_flattened_error_async():
     # fields is an error.
     with pytest.raises(ValueError):
         await client.get_specialist_pool(
-            specialist_pool_service.GetSpecialistPoolRequest(),
-            name="name_value",
+            specialist_pool_service.GetSpecialistPoolRequest(), name="name_value",
         )
 
 
@@ -976,8 +952,7 @@ def test_list_specialist_pools(
     request_type=specialist_pool_service.ListSpecialistPoolsRequest,
 ):
     client = SpecialistPoolServiceClient(
-        credentials=credentials.AnonymousCredentials(),
-        transport=transport,
+        credentials=credentials.AnonymousCredentials(), transport=transport,
     )
 
     # Everything is optional in proto3 as far as the runtime is concerned,
@@ -1018,8 +993,7 @@ async def test_list_specialist_pools_async(
     request_type=specialist_pool_service.ListSpecialistPoolsRequest,
 ):
     client = SpecialistPoolServiceAsyncClient(
-        credentials=credentials.AnonymousCredentials(),
-        transport=transport,
+        credentials=credentials.AnonymousCredentials(), transport=transport,
     )
 
     # Everything is optional in proto3 as far as the runtime is concerned,
@@ -1081,10 +1055,7 @@ def test_list_specialist_pools_field_headers():
 
     # Establish that the field header was sent.
     _, _, kw = call.mock_calls[0]
-    assert (
-        "x-goog-request-params",
-        "parent=parent/value",
-    ) in kw["metadata"]
+    assert ("x-goog-request-params", "parent=parent/value",) in kw["metadata"]
 
 
 @pytest.mark.asyncio
@@ -1115,10 +1086,7 @@ async def test_list_specialist_pools_field_headers_async():
 
     # Establish that the field header was sent.
     _, _, kw = call.mock_calls[0]
-    assert (
-        "x-goog-request-params",
-        "parent=parent/value",
-    ) in kw["metadata"]
+    assert ("x-goog-request-params", "parent=parent/value",) in kw["metadata"]
 
 
 def test_list_specialist_pools_flattened():
@@ -1135,9 +1103,7 @@ def test_list_specialist_pools_flattened():
 
         # Call the method with a truthy value for each flattened field,
         # using the keyword arguments to the method.
-        client.list_specialist_pools(
-            parent="parent_value",
-        )
+        client.list_specialist_pools(parent="parent_value",)
 
         # Establish that the underlying call was made with the expected
         # request object values.
@@ -1156,8 +1122,7 @@ def test_list_specialist_pools_flattened_error():
     # fields is an error.
     with pytest.raises(ValueError):
         client.list_specialist_pools(
-            specialist_pool_service.ListSpecialistPoolsRequest(),
-            parent="parent_value",
+            specialist_pool_service.ListSpecialistPoolsRequest(), parent="parent_value",
         )
 
 
@@ -1179,9 +1144,7 @@ async def test_list_specialist_pools_flattened_async():
         )
         # Call the method with a truthy value for each flattened field,
         # using the keyword arguments to the method.
-        response = await client.list_specialist_pools(
-            parent="parent_value",
-        )
+        response = await client.list_specialist_pools(parent="parent_value",)
 
         # Establish that the underlying call was made with the expected
         # request object values.
@@ -1201,15 +1164,12 @@ async def test_list_specialist_pools_flattened_error_async():
     # fields is an error.
     with pytest.raises(ValueError):
         await client.list_specialist_pools(
-            specialist_pool_service.ListSpecialistPoolsRequest(),
-            parent="parent_value",
+            specialist_pool_service.ListSpecialistPoolsRequest(), parent="parent_value",
         )
 
 
 def test_list_specialist_pools_pager():
-    client = SpecialistPoolServiceClient(
-        credentials=credentials.AnonymousCredentials,
-    )
+    client = SpecialistPoolServiceClient(credentials=credentials.AnonymousCredentials,)
 
     # Mock the actual call within the gRPC stub, and fake the request.
     with mock.patch.object(
@@ -1226,13 +1186,10 @@ def test_list_specialist_pools_pager():
                 next_page_token="abc",
             ),
             specialist_pool_service.ListSpecialistPoolsResponse(
-                specialist_pools=[],
-                next_page_token="def",
+                specialist_pools=[], next_page_token="def",
             ),
             specialist_pool_service.ListSpecialistPoolsResponse(
-                specialist_pools=[
-                    specialist_pool.SpecialistPool(),
-                ],
+                specialist_pools=[specialist_pool.SpecialistPool(),],
                 next_page_token="ghi",
             ),
             specialist_pool_service.ListSpecialistPoolsResponse(
@@ -1258,9 +1215,7 @@ def test_list_specialist_pools_pager():
 
 
 def test_list_specialist_pools_pages():
-    client = SpecialistPoolServiceClient(
-        credentials=credentials.AnonymousCredentials,
-    )
+    client = SpecialistPoolServiceClient(credentials=credentials.AnonymousCredentials,)
 
     # Mock the actual call within the gRPC stub, and fake the request.
     with mock.patch.object(
@@ -1277,13 +1232,10 @@ def test_list_specialist_pools_pages():
                 next_page_token="abc",
             ),
             specialist_pool_service.ListSpecialistPoolsResponse(
-                specialist_pools=[],
-                next_page_token="def",
+                specialist_pools=[], next_page_token="def",
             ),
             specialist_pool_service.ListSpecialistPoolsResponse(
-                specialist_pools=[
-                    specialist_pool.SpecialistPool(),
-                ],
+                specialist_pools=[specialist_pool.SpecialistPool(),],
                 next_page_token="ghi",
             ),
             specialist_pool_service.ListSpecialistPoolsResponse(
@@ -1322,13 +1274,10 @@ async def test_list_specialist_pools_async_pager():
                 next_page_token="abc",
             ),
             specialist_pool_service.ListSpecialistPoolsResponse(
-                specialist_pools=[],
-                next_page_token="def",
+                specialist_pools=[], next_page_token="def",
             ),
             specialist_pool_service.ListSpecialistPoolsResponse(
-                specialist_pools=[
-                    specialist_pool.SpecialistPool(),
-                ],
+                specialist_pools=[specialist_pool.SpecialistPool(),],
                 next_page_token="ghi",
             ),
             specialist_pool_service.ListSpecialistPoolsResponse(
@@ -1339,9 +1288,7 @@ async def test_list_specialist_pools_async_pager():
             ),
             RuntimeError,
         )
-        async_pager = await client.list_specialist_pools(
-            request={},
-        )
+        async_pager = await client.list_specialist_pools(request={},)
         assert async_pager.next_page_token == "abc"
         responses = []
         async for response in async_pager:
@@ -1374,13 +1321,10 @@ async def test_list_specialist_pools_async_pages():
                 next_page_token="abc",
             ),
             specialist_pool_service.ListSpecialistPoolsResponse(
-                specialist_pools=[],
-                next_page_token="def",
+                specialist_pools=[], next_page_token="def",
             ),
             specialist_pool_service.ListSpecialistPoolsResponse(
-                specialist_pools=[
-                    specialist_pool.SpecialistPool(),
-                ],
+                specialist_pools=[specialist_pool.SpecialistPool(),],
                 next_page_token="ghi",
             ),
             specialist_pool_service.ListSpecialistPoolsResponse(
@@ -1403,8 +1347,7 @@ def test_delete_specialist_pool(
     request_type=specialist_pool_service.DeleteSpecialistPoolRequest,
 ):
     client = SpecialistPoolServiceClient(
-        credentials=credentials.AnonymousCredentials(),
-        transport=transport,
+        credentials=credentials.AnonymousCredentials(), transport=transport,
     )
 
     # Everything is optional in proto3 as far as the runtime is concerned,
@@ -1440,8 +1383,7 @@ async def test_delete_specialist_pool_async(
     request_type=specialist_pool_service.DeleteSpecialistPoolRequest,
 ):
     client = SpecialistPoolServiceAsyncClient(
-        credentials=credentials.AnonymousCredentials(),
-        transport=transport,
+        credentials=credentials.AnonymousCredentials(), transport=transport,
     )
 
     # Everything is optional in proto3 as far as the runtime is concerned,
@@ -1499,10 +1441,7 @@ def test_delete_specialist_pool_field_headers():
 
     # Establish that the field header was sent.
     _, _, kw = call.mock_calls[0]
-    assert (
-        "x-goog-request-params",
-        "name=name/value",
-    ) in kw["metadata"]
+    assert ("x-goog-request-params", "name=name/value",) in kw["metadata"]
 
 
 @pytest.mark.asyncio
@@ -1533,10 +1472,7 @@ async def test_delete_specialist_pool_field_headers_async():
 
     # Establish that the field header was sent.
     _, _, kw = call.mock_calls[0]
-    assert (
-        "x-goog-request-params",
-        "name=name/value",
-    ) in kw["metadata"]
+    assert ("x-goog-request-params", "name=name/value",) in kw["metadata"]
 
 
 def test_delete_specialist_pool_flattened():
@@ -1553,9 +1489,7 @@ def test_delete_specialist_pool_flattened():
 
         # Call the method with a truthy value for each flattened field,
         # using the keyword arguments to the method.
-        client.delete_specialist_pool(
-            name="name_value",
-        )
+        client.delete_specialist_pool(name="name_value",)
 
         # Establish that the underlying call was made with the expected
         # request object values.
@@ -1574,8 +1508,7 @@ def test_delete_specialist_pool_flattened_error():
     # fields is an error.
     with pytest.raises(ValueError):
         client.delete_specialist_pool(
-            specialist_pool_service.DeleteSpecialistPoolRequest(),
-            name="name_value",
+            specialist_pool_service.DeleteSpecialistPoolRequest(), name="name_value",
         )
 
 
@@ -1597,9 +1530,7 @@ async def test_delete_specialist_pool_flattened_async():
         )
         # Call the method with a truthy value for each flattened field,
         # using the keyword arguments to the method.
-        response = await client.delete_specialist_pool(
-            name="name_value",
-        )
+        response = await client.delete_specialist_pool(name="name_value",)
 
         # Establish that the underlying call was made with the expected
         # request object values.
@@ -1619,8 +1550,7 @@ async def test_delete_specialist_pool_flattened_error_async():
     # fields is an error.
     with pytest.raises(ValueError):
         await client.delete_specialist_pool(
-            specialist_pool_service.DeleteSpecialistPoolRequest(),
-            name="name_value",
+            specialist_pool_service.DeleteSpecialistPoolRequest(), name="name_value",
         )
 
 
@@ -1629,8 +1559,7 @@ def test_update_specialist_pool(
     request_type=specialist_pool_service.UpdateSpecialistPoolRequest,
 ):
     client = SpecialistPoolServiceClient(
-        credentials=credentials.AnonymousCredentials(),
-        transport=transport,
+        credentials=credentials.AnonymousCredentials(), transport=transport,
     )
 
     # Everything is optional in proto3 as far as the runtime is concerned,
@@ -1666,8 +1595,7 @@ async def test_update_specialist_pool_async(
     request_type=specialist_pool_service.UpdateSpecialistPoolRequest,
 ):
     client = SpecialistPoolServiceAsyncClient(
-        credentials=credentials.AnonymousCredentials(),
-        transport=transport,
+        credentials=credentials.AnonymousCredentials(), transport=transport,
     )
 
     # Everything is optional in proto3 as far as the runtime is concerned,
@@ -1869,8 +1797,7 @@ def test_credentials_transport_error():
     )
     with pytest.raises(ValueError):
         client = SpecialistPoolServiceClient(
-            credentials=credentials.AnonymousCredentials(),
-            transport=transport,
+            credentials=credentials.AnonymousCredentials(), transport=transport,
         )
 
     # It is an error to provide a credentials file and a transport instance.
@@ -1889,8 +1816,7 @@ def test_credentials_transport_error():
     )
     with pytest.raises(ValueError):
         client = SpecialistPoolServiceClient(
-            client_options={"scopes": ["1", "2"]},
-            transport=transport,
+            client_options={"scopes": ["1", "2"]}, transport=transport,
         )
 
 
@@ -1938,10 +1864,7 @@ def test_transport_grpc_default():
     client = SpecialistPoolServiceClient(
         credentials=credentials.AnonymousCredentials(),
     )
-    assert isinstance(
-        client.transport,
-        transports.SpecialistPoolServiceGrpcTransport,
-    )
+    assert isinstance(client.transport, transports.SpecialistPoolServiceGrpcTransport,)
 
 
 def test_specialist_pool_service_base_transport_error():
@@ -1992,8 +1915,7 @@ def test_specialist_pool_service_base_transport_with_credentials_file():
         Transport.return_value = None
         load_creds.return_value = (credentials.AnonymousCredentials(), None)
         transport = transports.SpecialistPoolServiceTransport(
-            credentials_file="credentials.json",
-            quota_project_id="octopus",
+            credentials_file="credentials.json", quota_project_id="octopus",
        )
         load_creds.assert_called_once_with(
             "credentials.json",
@@ -2063,8 +1985,7 @@ def test_specialist_pool_service_grpc_transport_channel():
 
     # Check that channel is used if provided.
     transport = transports.SpecialistPoolServiceGrpcTransport(
-        host="squid.clam.whelk",
-        channel=channel,
+        host="squid.clam.whelk", channel=channel,
     )
     assert transport.grpc_channel == channel
     assert transport._host == "squid.clam.whelk:443"
@@ -2076,8 +1997,7 @@ def test_specialist_pool_service_grpc_asyncio_transport_channel():
 
     # Check that channel is used if provided.
     transport = transports.SpecialistPoolServiceGrpcAsyncIOTransport(
-        host="squid.clam.whelk",
-        channel=channel,
+        host="squid.clam.whelk", channel=channel,
     )
     assert transport.grpc_channel == channel
     assert transport._host == "squid.clam.whelk:443"
@@ -2174,16 +2094,12 @@ def test_specialist_pool_service_transport_channel_mtls_with_adc(transport_class
 
 def test_specialist_pool_service_grpc_lro_client():
     client = SpecialistPoolServiceClient(
-        credentials=credentials.AnonymousCredentials(),
-        transport="grpc",
+        credentials=credentials.AnonymousCredentials(), transport="grpc",
     )
     transport = client.transport
 
     # Ensure that we have a api-core operations client.
-    assert isinstance(
-        transport.operations_client,
-        operations_v1.OperationsClient,
-    )
+    assert isinstance(transport.operations_client, operations_v1.OperationsClient,)
 
     # Ensure that subsequent calls to the property send the exact same object.
     assert transport.operations_client is transport.operations_client
@@ -2191,16 +2107,12 @@ def test_specialist_pool_service_grpc_lro_async_client():
     client = SpecialistPoolServiceAsyncClient(
-        credentials=credentials.AnonymousCredentials(),
-        transport="grpc_asyncio",
+        credentials=credentials.AnonymousCredentials(), transport="grpc_asyncio",
     )
     transport = client.transport
 
     # Ensure that we have a api-core operations client.
-    assert isinstance(
-        transport.operations_client,
-        operations_v1.OperationsAsyncClient,
-    )
+    assert isinstance(transport.operations_client, operations_v1.OperationsAsyncClient,)
 
     # Ensure that subsequent calls to the property send the exact same object.
     assert transport.operations_client is transport.operations_client
@@ -2212,9 +2124,7 @@ def test_specialist_pool_path():
     specialist_pool = "whelk"
 
     expected = "projects/{project}/locations/{location}/specialistPools/{specialist_pool}".format(
-        project=project,
-        location=location,
-        specialist_pool=specialist_pool,
+        project=project, location=location, specialist_pool=specialist_pool,
     )
     actual = SpecialistPoolServiceClient.specialist_pool_path(
         project, location, specialist_pool
     )
@@ -2259,9 +2169,7 @@ def test_parse_common_billing_account_path():
 
 def test_common_folder_path():
     folder = "winkle"
-    expected = "folders/{folder}".format(
-        folder=folder,
-    )
+    expected = "folders/{folder}".format(folder=folder,)
     actual = SpecialistPoolServiceClient.common_folder_path(folder)
     assert expected == actual
 
@@ -2280,9 +2188,7 @@ def test_parse_common_folder_path():
 
 def test_common_organization_path():
     organization = "scallop"
-    expected = "organizations/{organization}".format(
-        organization=organization,
-    )
+    expected = "organizations/{organization}".format(organization=organization,)
     actual = SpecialistPoolServiceClient.common_organization_path(organization)
     assert expected == actual
 
@@ -2301,9 +2207,7 @@ def test_parse_common_organization_path():
 
 def test_common_project_path():
     project = "squid"
-    expected = "projects/{project}".format(
-        project=project,
-    )
+    expected = "projects/{project}".format(project=project,)
     actual = SpecialistPoolServiceClient.common_project_path(project)
     assert expected == actual
 
@@ -2324,8 +2228,7 @@ def test_common_location_path():
     location = "octopus"
 
     expected = "projects/{project}/locations/{location}".format(
-        project=project,
-        location=location,
+        project=project, location=location,
     )
     actual = SpecialistPoolServiceClient.common_location_path(project, location)
     assert expected == actual
@@ -2350,8 +2253,7 @@ def test_client_withDEFAULT_CLIENT_INFO():
         transports.SpecialistPoolServiceTransport, "_prep_wrapped_messages"
     ) as prep:
         client = SpecialistPoolServiceClient(
-            credentials=credentials.AnonymousCredentials(),
-            client_info=client_info,
+            credentials=credentials.AnonymousCredentials(), client_info=client_info,
         )
         prep.assert_called_once_with(client_info)
 
@@ -2360,7 +2262,6 @@ def test_client_withDEFAULT_CLIENT_INFO():
     ) as prep:
         transport_class = SpecialistPoolServiceClient.get_transport_class()
         transport = transport_class(
-            credentials=credentials.AnonymousCredentials(),
-            client_info=client_info,
+            credentials=credentials.AnonymousCredentials(), client_info=client_info,
        )
         prep.assert_called_once_with(client_info)
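
Note for reviewers: every hunk above is the same mechanical rewrite. Wherever an argument list carries a trailing comma but fits within the 88-column limit, the formatter joins it onto one line and the trailing comma survives the join (e.g. `PredictionServiceClient(credentials=credentials.AnonymousCredentials(),)`); calls that would still overflow, such as the `SpecialistPoolServiceClient(...)` constructors, keep the closing parenthesis on its own line. That joining behavior is characteristic of black releases before 20.8b0, which introduced the "magic trailing comma" that forces such calls to stay exploded; the exact black version used for this commit is an assumption on my part, it is not recorded in the patch. A minimal, self-contained sketch of the transformation (hypothetical reproduction, not part of the patch):

    # Assumes an old formatter release, e.g. `pip install black==19.10b0`;
    # with black >= 20.8b0 the trailing comma would keep the call exploded.
    import black

    SRC = (
        'options = client_options.ClientOptions(\n'
        '    scopes=["1", "2"],\n'
        ')\n'
    )

    # black.format_str applies the same rules as running `black` on a file.
    print(black.format_str(SRC, mode=black.FileMode(line_length=88)))
    # Expected output under the pre-20.8b0 rules, matching the "+" lines above:
    # options = client_options.ClientOptions(scopes=["1", "2"],)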