From e24ee5185ceb12a03ff99b0505c303a1173c143b Mon Sep 17 00:00:00 2001 From: Sasha Sobran Date: Mon, 29 Nov 2021 15:50:49 -0500 Subject: [PATCH 01/11] checkpoint --- google/cloud/aiplatform/base.py | 77 +++-- google/cloud/aiplatform/datasets/dataset.py | 3 +- google/cloud/aiplatform/initializer.py | 1 + google/cloud/aiplatform/jobs.py | 18 +- google/cloud/aiplatform/metadata/artifact.py | 2 + google/cloud/aiplatform/metadata/context.py | 2 + google/cloud/aiplatform/metadata/execution.py | 2 + .../aiplatform/metadata/metadata_store.py | 2 + google/cloud/aiplatform/metadata/resource.py | 1 - google/cloud/aiplatform/models.py | 6 +- google/cloud/aiplatform/pipeline_jobs.py | 8 +- .../aiplatform/tensorboard/tensorboard.py | 268 +++++++++++++++++- google/cloud/aiplatform/training_jobs.py | 15 +- google/cloud/aiplatform/utils/__init__.py | 85 +++--- .../cloud/aiplatform/utils/console_utils.py | 11 +- tests/unit/aiplatform/test_jobs.py | 15 +- 16 files changed, 415 insertions(+), 101 deletions(-) diff --git a/google/cloud/aiplatform/base.py b/google/cloud/aiplatform/base.py index c4eb2e4853..c20abdf467 100644 --- a/google/cloud/aiplatform/base.py +++ b/google/cloud/aiplatform/base.py @@ -397,7 +397,6 @@ class VertexAiResourceNoun(metaclass=abc.ABCMeta): Subclasses require two class attributes: client_class: The client to instantiate to interact with this resource noun. - _is_client_prediction_client: Flag to indicate if the client requires a prediction endpoint. Subclass is required to populate private attribute _gca_resource which is the service representation of the resource noun. 
@@ -414,29 +413,40 @@ def client_class(cls) -> Type[utils.VertexAiServiceClientWithOverride]: @property @classmethod @abc.abstractmethod - def _is_client_prediction_client(cls) -> bool: - """Flag to indicate whether to use prediction endpoint with client.""" - pass - - @property - @abc.abstractmethod def _getter_method(cls) -> str: """Name of getter method of client class for retrieving the resource.""" pass @property + @classmethod @abc.abstractmethod def _delete_method(cls) -> str: """Name of delete method of client class for deleting the resource.""" pass @property + @classmethod @abc.abstractmethod def _resource_noun(cls) -> str: """Resource noun.""" pass + @property + @classmethod + @abc.abstractmethod + def _parse_resource_name_method(cls) -> str: + """Method name on GAPIC client to parse a resource name.""" + pass + + @property + @classmethod + @abc.abstractmethod + def _format_resource_name_method(self) -> str: + """Method name on GAPIC client to format a resource name.""" + pass + + def __init__( self, project: Optional[str] = None, @@ -486,15 +496,49 @@ def _instantiate_client( client_class=cls.client_class, credentials=credentials, location_override=location, - prediction_client=cls._is_client_prediction_client, ) + @classmethod + def parse_resource_name(cls, resource_name: str) -> Dict[str, str]: + """ + Parses resource name into it's component segments. + + Args: + resource_name: Resource name of this resource. + Returns: + Dictionary of component segments. + """ + + # gets the underlying wrapped gapic client class + return getattr(cls.client_class.get_gapic_client_class(), cls._parse_resource_name_method)(resource_name) + + @classmethod + def format_resource_name(cls, **kwargs: str) -> str: + """ + Formats a resource name it's component segments. + + Args: + **kwargs: Resource name parts. Singular and snake case. ie: + + format_resource_name( + project='my-project', + location='us-central1' + ) + Returns: + Resource name. 
+ """ + + # gets the underlying wrapped gapic client class + return getattr(cls.client_class.get_gapic_client_class(), cls._format_resource_name_method)(**kwargs) + + + def _get_and_validate_project_location( self, resource_name: str, project: Optional[str] = None, location: Optional[str] = None, - ) -> Tuple: + ) -> Tuple[str, str]: """Validate the project and location for the resource. @@ -507,19 +551,19 @@ def _get_and_validate_project_location( RuntimeError if location is different from resource location """ - fields = utils.extract_fields_from_resource_name( - resource_name, self._resource_noun - ) + fields = self.parse_resource_name(resource_name) + if not fields: return project, location - if location and fields.location != location: + if location and fields['location'] != location: raise RuntimeError( f"location {location} is provided, but different from " - f"the resource location {fields.location}" + f"the resource location {fields['location']}" ) - return fields.project, fields.location + return fields['project'], fields['location'] + def _get_gca_resource(self, resource_name: str) -> proto.Message: """Returns GAPIC service representation of client class resource.""" @@ -528,10 +572,11 @@ def _get_gca_resource(self, resource_name: str) -> proto.Message: resource_name (str): Required. A fully-qualified resource name or ID. 
""" - resource_name = utils.full_resource_name( resource_name=resource_name, resource_noun=self._resource_noun, + parse_resource_name_method=self.parse_resource_name, + format_resource_name_method=self.format_resource_name, project=self.project, location=self.location, ) diff --git a/google/cloud/aiplatform/datasets/dataset.py b/google/cloud/aiplatform/datasets/dataset.py index 5e5de0058b..256119c51e 100644 --- a/google/cloud/aiplatform/datasets/dataset.py +++ b/google/cloud/aiplatform/datasets/dataset.py @@ -39,11 +39,12 @@ class _Dataset(base.VertexAiResourceNounWithFutureManager): """Managed dataset resource for Vertex AI.""" client_class = utils.DatasetClientWithOverride - _is_client_prediction_client = False _resource_noun = "datasets" _getter_method = "get_dataset" _list_method = "list_datasets" _delete_method = "delete_dataset" + _parse_resource_name_method = "parse_dataset_path" + _foramt_resource_name_method = "dataset_path" _supported_metadata_schema_uris: Tuple[str] = () diff --git a/google/cloud/aiplatform/initializer.py b/google/cloud/aiplatform/initializer.py index ea1a51c8a7..966bd2265f 100644 --- a/google/cloud/aiplatform/initializer.py +++ b/google/cloud/aiplatform/initializer.py @@ -60,6 +60,7 @@ def init( staging_bucket: Optional[str] = None, credentials: Optional[auth_credentials.Credentials] = None, encryption_spec_key_name: Optional[str] = None, + tensorboard_resource: Optional[str] = None, ): """Updates common initialization parameters with provided options. 
diff --git a/google/cloud/aiplatform/jobs.py b/google/cloud/aiplatform/jobs.py index eb593c70bf..3af3d5e19e 100644 --- a/google/cloud/aiplatform/jobs.py +++ b/google/cloud/aiplatform/jobs.py @@ -82,7 +82,6 @@ class _Job(base.VertexAiResourceNounWithFutureManager): """ client_class = utils.JobClientWithOverride - _is_client_prediction_client = False def __init__( self, @@ -167,8 +166,11 @@ def _cancel_method(cls) -> str: def _dashboard_uri(self) -> Optional[str]: """Helper method to compose the dashboard uri where job can be viewed.""" - fields = utils.extract_fields_from_resource_name(self.resource_name) - url = f"https://console.cloud.google.com/ai/platform/locations/{fields.location}/{self._job_type}/{fields.id}?project={fields.project}" + fields = self.parse_resource_name(self.resource_name) + location = fields.pop('location') + project = fields.pop('project') + job = list(fields.values())[0] + url = f"https://console.cloud.google.com/ai/platform/locations/{location}/{self._job_type}/{job}?project={project}" return url def _log_job_state(self): @@ -279,6 +281,8 @@ class BatchPredictionJob(_Job): _cancel_method = "cancel_batch_prediction_job" _delete_method = "delete_batch_prediction_job" _job_type = "batch-predictions" + _parse_resource_name_method = "parse_batch_prediction_job_path" + _format_resource_name_method = "batch_prediction_job_path" def __init__( self, @@ -532,6 +536,8 @@ def create( model_name = utils.full_resource_name( resource_name=model_name, resource_noun="models", + parse_resource_name_method=aiplatform.Model.parse_resource_name, + format_resource_name_method=aiplatform.Model.format_resource_name, project=project, location=location, ) @@ -951,6 +957,8 @@ class DataLabelingJob(_Job): _cancel_method = "cancel_data_labeling_job" _delete_method = "delete_data_labeling_job" _job_type = "labeling-tasks" + _parse_resource_name_method = "parse_data_labeling_job_path" + _format_resource_name_method = "data_labeling_job_path" pass @@ -962,6 +970,8 @@ 
class CustomJob(_RunnableJob): _list_method = "list_custom_jobs" _cancel_method = "cancel_custom_job" _delete_method = "delete_custom_job" + _parse_resource_name_method = "parse_custom_job_path" + _format_resource_name_method = "custom_job_path" _job_type = "training" def __init__( @@ -1435,6 +1445,8 @@ class HyperparameterTuningJob(_RunnableJob): _list_method = "list_hyperparameter_tuning_jobs" _cancel_method = "cancel_hyperparameter_tuning_job" _delete_method = "delete_hyperparameter_tuning_job" + _parse_resource_name_method = "parse_hyperparameter_tuning_job_path" + _format_resource_name_method = "hyperparameter_tuning_job_path" _job_type = "training" def __init__( diff --git a/google/cloud/aiplatform/metadata/artifact.py b/google/cloud/aiplatform/metadata/artifact.py index 13ac2aafbf..41a6e73949 100644 --- a/google/cloud/aiplatform/metadata/artifact.py +++ b/google/cloud/aiplatform/metadata/artifact.py @@ -31,6 +31,8 @@ class _Artifact(resource._Resource): _resource_noun = "artifacts" _getter_method = "get_artifact" _delete_method = "delete_artifact" + _parse_resource_name_method = "parse_artifact_path" + _format_resource_name_method = "artifact_path" @classmethod def _create_resource( diff --git a/google/cloud/aiplatform/metadata/context.py b/google/cloud/aiplatform/metadata/context.py index 670d8af844..f2868f4f55 100644 --- a/google/cloud/aiplatform/metadata/context.py +++ b/google/cloud/aiplatform/metadata/context.py @@ -31,6 +31,8 @@ class _Context(resource._Resource): _resource_noun = "contexts" _getter_method = "get_context" _delete_method = "delete_context" + _parse_resource_name_method = "parse_context_path" + _format_resource_name_method = "context_path" def add_artifacts_and_executions( self, diff --git a/google/cloud/aiplatform/metadata/execution.py b/google/cloud/aiplatform/metadata/execution.py index 50655d0175..c02fea6476 100644 --- a/google/cloud/aiplatform/metadata/execution.py +++ b/google/cloud/aiplatform/metadata/execution.py @@ -34,6 +34,8 
@@ class _Execution(resource._Resource): _resource_noun = "executions" _getter_method = "get_execution" _delete_method = "delete_execution" + _parse_resource_name_method = "parse_execution_path" + _format_resource_name_method = "execution_path" def add_artifact( self, artifact_resource_name: str, input: bool, diff --git a/google/cloud/aiplatform/metadata/metadata_store.py b/google/cloud/aiplatform/metadata/metadata_store.py index 82b6742df5..c7cbcaa68c 100644 --- a/google/cloud/aiplatform/metadata/metadata_store.py +++ b/google/cloud/aiplatform/metadata/metadata_store.py @@ -35,6 +35,8 @@ class _MetadataStore(base.VertexAiResourceNounWithFutureManager): _resource_noun = "metadataStores" _getter_method = "get_metadata_store" _delete_method = "delete_metadata_store" + _parse_resource_name_method = "parse_metadata_store_path" + _format_resource_name_method = "metadata_store_path" def __init__( self, diff --git a/google/cloud/aiplatform/metadata/resource.py b/google/cloud/aiplatform/metadata/resource.py index 3ebcaa5112..6a6c6175b8 100644 --- a/google/cloud/aiplatform/metadata/resource.py +++ b/google/cloud/aiplatform/metadata/resource.py @@ -36,7 +36,6 @@ class _Resource(base.VertexAiResourceNounWithFutureManager, abc.ABC): """Metadata Resource for Vertex AI""" client_class = utils.MetadataClientWithOverride - _is_client_prediction_client = False _delete_method = None def __init__( diff --git a/google/cloud/aiplatform/models.py b/google/cloud/aiplatform/models.py index 2ce48adc53..31b3968da2 100644 --- a/google/cloud/aiplatform/models.py +++ b/google/cloud/aiplatform/models.py @@ -73,11 +73,12 @@ class Prediction(NamedTuple): class Endpoint(base.VertexAiResourceNounWithFutureManager): client_class = utils.EndpointClientWithOverride - _is_client_prediction_client = False _resource_noun = "endpoints" _getter_method = "get_endpoint" _list_method = "list_endpoints" _delete_method = "delete_endpoint" + _parse_resource_name_method = "parse_endpoint_path" + 
_format_resource_name_method = "endpoint_path" def __init__( self, @@ -1294,11 +1295,12 @@ def delete(self, force: bool = False, sync: bool = True) -> None: class Model(base.VertexAiResourceNounWithFutureManager): client_class = utils.ModelClientWithOverride - _is_client_prediction_client = False _resource_noun = "models" _getter_method = "get_model" _list_method = "list_models" _delete_method = "delete_model" + _parse_resource_name_method = "parse_model_path" + _format_resource_name_method = "model_path" @property def uri(self) -> Optional[str]: diff --git a/google/cloud/aiplatform/pipeline_jobs.py b/google/cloud/aiplatform/pipeline_jobs.py index 5679a58be6..bcacf08939 100644 --- a/google/cloud/aiplatform/pipeline_jobs.py +++ b/google/cloud/aiplatform/pipeline_jobs.py @@ -79,12 +79,12 @@ def _set_enable_caching_value( class PipelineJob(base.VertexAiResourceNounWithFutureManager): client_class = utils.PipelineJobClientWithOverride - _is_client_prediction_client = False - _resource_noun = "pipelineJobs" _delete_method = "delete_pipeline_job" _getter_method = "get_pipeline_job" _list_method = "list_pipeline_jobs" + _parse_resource_name_method = "parse_pipeline_job_path" + _format_resource_name_method = "pipeline_job_path" def __init__( self, @@ -315,8 +315,8 @@ def has_failed(self) -> bool: def _dashboard_uri(self) -> str: """Helper method to compose the dashboard uri where pipeline can be viewed.""" - fields = utils.extract_fields_from_resource_name(self.resource_name) - url = f"https://console.cloud.google.com/vertex-ai/locations/{fields.location}/pipelines/runs/{fields.id}?project={fields.project}" + fields = self.parse_resource_name(self.resource_name) + url = f"https://console.cloud.google.com/vertex-ai/locations/{fields['location']}/pipelines/runs/{fields['pipeline_job']}?project={fields['project']}" return url def _block_until_complete(self): diff --git a/google/cloud/aiplatform/tensorboard/tensorboard.py b/google/cloud/aiplatform/tensorboard/tensorboard.py 
index 1e41cc9755..8381319ecc 100644 --- a/google/cloud/aiplatform/tensorboard/tensorboard.py +++ b/google/cloud/aiplatform/tensorboard/tensorboard.py @@ -27,16 +27,17 @@ _LOGGER = base.Logger(__name__) +class _TensorboardServiceResource(base.VertexAiResourceNounWithFutureManager): + client_class = utils.TensorboardClientWithOverride -class Tensorboard(base.VertexAiResourceNounWithFutureManager): +class Tensorboard(_TensorboardServiceResource): """Managed tensorboard resource for Vertex AI.""" - - client_class = utils.TensorboardClientWithOverride - _is_client_prediction_client = False _resource_noun = "tensorboards" _getter_method = "get_tensorboard" _list_method = "list_tensorboards" _delete_method = "delete_tensorboard" + _parse_resource_name_method = "parse_tensorboard_path" + _format_resource_name_method = "tensorboard_path" def __init__( self, @@ -281,3 +282,262 @@ def update( _LOGGER.log_action_completed_against_resource("tensorboard", "updated", self) return self + + +class TensorboardExperiment(_TensorboardServiceResource): + """Managed tensorboard experiment resource for Vertex AI.""" + _resource_noun = "experiments" + _getter_method = "get_tensorboard_experiment" + _list_method = "list_tensorboard_experiments" + _delete_method = "delete_tensorboard_experiment" + _parse_resource_name_method = "parse_tensorboard_experiment_path" + _format_resource_name_method = "tensorboard_experiment_path" + + def __init__( + self, + tensorboard_experiment_name: str, + tensorboard_name: Optional[str] = None, + project: Optional[str] = None, + location: Optional[str] = None, + credentials: Optional[auth_credentials.Credentials] = None, + ): + """Retrieves an existing managed tensorboard experiment given a tensorboard experiment name or ID. + + Args: + tensorboard_experiment_name (str): + Required: A fully-qualified tensorboard resource experiment name or tensorboard experiment ID.
+ Example: "projects/123/locations/us-central1/tensorboards/456/experiments/123" or + "123" when tensorboard_name is passed and project and location are initialized or passed. + tensorboard_name (str): + Optional. Tensorboard resource id. + project (str): + Optional. Project to retrieve tensorboard from. If not set, project + set in aiplatform.init will be used. + location (str): + Optional. Location to retrieve tensorboard from. If not set, location + set in aiplatform.init will be used. + credentials (auth_credentials.Credentials): + Optional. Custom credentials to use to retrieve this Tensorboard. Overrides + credentials set in aiplatform.init. + """ + + super().__init__( + project=project, + location=location, + credentials=credentials, + resource_name=tensorboard_experiment_name, + ) + + + self._gca_resource = self._get_gca_resource(resource_name=tensorboard_experiment_name) + + @classmethod + def create( + cls, + display_name: str, + description: Optional[str] = None, + labels: Optional[Dict[str, str]] = None, + project: Optional[str] = None, + location: Optional[str] = None, + credentials: Optional[auth_credentials.Credentials] = None, + request_metadata: Optional[Sequence[Tuple[str, str]]] = (), + encryption_spec_key_name: Optional[str] = None, + ) -> "Tensorboard": + """Creates a new tensorboard. + + Example Usage: + + tb = aiplatform.Tensorboard.create( + display_name='my display name', + description='my description', + labels={ + 'key1': 'value1', + 'key2': 'value2' + } + ) + + Args: + display_name (str): + Required. The user-defined name of the Tensorboard. + The name can be up to 128 characters long and can be consist + of any UTF-8 characters. + description (str): + Optional. Description of this Tensorboard. + labels (Dict[str, str]): + Optional. Labels with user-defined metadata to organize your Tensorboards.
+ Label keys and values can be no longer than 64 characters + (Unicode codepoints), can only contain lowercase letters, numeric + characters, underscores and dashes. International characters are allowed. + No more than 64 user labels can be associated with one Tensorboard + (System labels are excluded). + See https://goo.gl/xmQnxf for more information and examples of labels. + System reserved label keys are prefixed with "aiplatform.googleapis.com/" + and are immutable. + project (str): + Optional. Project to upload this model to. Overrides project set in + aiplatform.init. + location (str): + Optional. Location to upload this model to. Overrides location set in + aiplatform.init. + credentials (auth_credentials.Credentials): + Optional. Custom credentials to use to upload this model. Overrides + credentials set in aiplatform.init. + request_metadata (Sequence[Tuple[str, str]]): + Optional. Strings which should be sent along with the request as metadata. + encryption_spec_key_name (str): + Optional. Cloud KMS resource identifier of the customer + managed encryption key used to protect the tensorboard. Has the + form: + ``projects/my-project/locations/my-region/keyRings/my-kr/cryptoKeys/my-key``. + The key needs to be in the same region as where the compute + resource is created. + + If set, this Tensorboard and all sub-resources of this Tensorboard will be secured by this key. + + Overrides encryption_spec_key_name set in aiplatform.init. + + Returns: + tensorboard (Tensorboard): + Instantiated representation of the managed tensorboard resource. 
+ """ + + utils.validate_display_name(display_name) + if labels: + utils.validate_labels(labels) + + api_client = cls._instantiate_client(location=location, credentials=credentials) + + parent = initializer.global_config.common_location_path( + project=project, location=location + ) + + encryption_spec = initializer.global_config.get_encryption_spec( + encryption_spec_key_name=encryption_spec_key_name + ) + + gapic_tensorboard = gca_tensorboard.Tensorboard( + display_name=display_name, + description=description, + labels=labels, + encryption_spec=encryption_spec, + ) + + create_tensorboard_lro = api_client.create_tensorboard( + parent=parent, tensorboard=gapic_tensorboard, metadata=request_metadata + ) + + _LOGGER.log_create_with_lro(cls, create_tensorboard_lro) + + created_tensorboard = create_tensorboard_lro.result() + + _LOGGER.log_create_complete(cls, created_tensorboard, "tb") + + return cls( + tensorboard_name=created_tensorboard.name, + project=project or initializer.global_config.project, + location=location or initializer.global_config.location, + credentials=credentials, + ) + + def update( + self, + display_name: Optional[str] = None, + description: Optional[str] = None, + labels: Optional[Dict[str, str]] = None, + request_metadata: Optional[Sequence[Tuple[str, str]]] = (), + encryption_spec_key_name: Optional[str] = None, + ) -> "Tensorboard": + """Updates an existing tensorboard. + + Example Usage: + + tb = aiplatform.Tensorboard(tensorboard_name='123456') + tb.update( + display_name='update my display name', + description='update my description', + ) + + Args: + display_name (str): + Optional. User-defined name of the Tensorboard. + The name can be up to 128 characters long and can be consist + of any UTF-8 characters. + description (str): + Optional. Description of this Tensorboard. + labels (Dict[str, str]): + Optional. Labels with user-defined metadata to organize your Tensorboards. 
+ Label keys and values can be no longer than 64 characters + (Unicode codepoints), can only contain lowercase letters, numeric + characters, underscores and dashes. International characters are allowed. + No more than 64 user labels can be associated with one Tensorboard + (System labels are excluded). + See https://goo.gl/xmQnxf for more information and examples of labels. + System reserved label keys are prefixed with "aiplatform.googleapis.com/" + and are immutable. + request_metadata (Sequence[Tuple[str, str]]): + Optional. Strings which should be sent along with the request as metadata. + encryption_spec_key_name (str): + Optional. Cloud KMS resource identifier of the customer + managed encryption key used to protect the tensorboard. Has the + form: + ``projects/my-project/locations/my-region/keyRings/my-kr/cryptoKeys/my-key``. + The key needs to be in the same region as where the compute + resource is created. + + If set, this Tensorboard and all sub-resources of this Tensorboard will be secured by this key. + + Overrides encryption_spec_key_name set in aiplatform.init. + + Returns: + tensorboard (Tensorboard): + The managed tensorboard resource. 
+ """ + update_mask = list() + + if display_name: + utils.validate_display_name(display_name) + update_mask.append("display_name") + + if description: + update_mask.append("description") + + if labels: + utils.validate_labels(labels) + update_mask.append("labels") + + encryption_spec = None + if encryption_spec_key_name: + encryption_spec = initializer.global_config.get_encryption_spec( + encryption_spec_key_name=encryption_spec_key_name, + ) + update_mask.append("encryption_spec") + + update_mask = field_mask_pb2.FieldMask(paths=update_mask) + + gapic_tensorboard = gca_tensorboard.Tensorboard( + name=self.resource_name, + display_name=display_name, + description=description, + labels=labels, + encryption_spec=encryption_spec, + ) + + _LOGGER.log_action_start_against_resource( + "Updating", "tensorboard", self, + ) + + update_tensorboard_lro = self.api_client.update_tensorboard( + tensorboard=gapic_tensorboard, + update_mask=update_mask, + metadata=request_metadata, + ) + + _LOGGER.log_action_started_against_resource_with_lro( + "Update", "tensorboard", self.__class__, update_tensorboard_lro + ) + + update_tensorboard_lro.result() + + _LOGGER.log_action_completed_against_resource("tensorboard", "updated", self) + + return self diff --git a/google/cloud/aiplatform/training_jobs.py b/google/cloud/aiplatform/training_jobs.py index 679bb277ab..f4ab316d2b 100644 --- a/google/cloud/aiplatform/training_jobs.py +++ b/google/cloud/aiplatform/training_jobs.py @@ -69,11 +69,12 @@ class _TrainingJob(base.VertexAiResourceNounWithFutureManager): client_class = utils.PipelineClientWithOverride - _is_client_prediction_client = False _resource_noun = "trainingPipelines" _getter_method = "get_training_pipeline" _list_method = "list_training_pipelines" _delete_method = "delete_training_pipeline" + _parse_resource_name_method = "parse_training_pipeline_path" + _format_resource_name_method = "training_pipeline_path" def __init__( self, @@ -846,13 +847,7 @@ def _get_model(self) -> 
Optional[models.Model]: return None if self._gca_resource.model_to_upload.name: - fields = utils.extract_fields_from_resource_name( - self._gca_resource.model_to_upload.name - ) - - return models.Model( - fields.id, project=fields.project, location=fields.location, - ) + return models.Model(model_name=self._gca_resource.model_to_upload.name) def _wait_callback(self): """Callback performs custom logging during _block_until_complete. Override in subclass.""" @@ -916,8 +911,8 @@ def has_failed(self) -> bool: def _dashboard_uri(self) -> str: """Helper method to compose the dashboard uri where training can be viewed.""" - fields = utils.extract_fields_from_resource_name(self.resource_name) - url = f"https://console.cloud.google.com/ai/platform/locations/{fields.location}/training/{fields.id}?project={fields.project}" + fields = self.parse_resource_name(self.resource_name) + url = f"https://console.cloud.google.com/ai/platform/locations/{fields['location']}/training/{fields['training_pipeline']}?project={fields['project']}" return url @property diff --git a/google/cloud/aiplatform/utils/__init__.py b/google/cloud/aiplatform/utils/__init__.py index 379ebfc179..b1aa97227d 100644 --- a/google/cloud/aiplatform/utils/__init__.py +++ b/google/cloud/aiplatform/utils/__init__.py @@ -22,7 +22,7 @@ from collections import namedtuple import logging import re -from typing import Any, Dict, Match, Optional, Type, TypeVar, Tuple +from typing import Any, Callable, Dict, Match, Optional, Type, TypeVar, Tuple from google.api_core import client_options from google.api_core import gapic_v1 @@ -143,6 +143,9 @@ def extract_fields_from_resource_name( def full_resource_name( resource_name: str, resource_noun: str, + parse_resource_name_method: Callable[[str], Dict[str,str]], + format_resource_name_method: Callable[..., str], + parent_resource_name_fields: Optional[Dict[str, str]] = None, project: Optional[str] = None, location: Optional[str] = None, ) -> str: @@ -169,69 +172,39 @@ def 
full_resource_name( Returns: resource_name (str): A fully-qualified Vertex AI resource name. - - Raises: - ValueError: - If resource name, resource ID or project ID not provided. """ - validate_resource_noun(resource_noun) # Fully qualified resource name, e.g., "projects/.../locations/.../datasets/12345" or # "projects/.../locations/.../metadataStores/.../contexts/12345" - valid_name = extract_fields_from_resource_name( - resource_name=resource_name, resource_noun=resource_noun - ) + fields = parse_resource_name_method(resource_name) + if fields: + return resource_name user_project = project or initializer.global_config.project user_location = location or initializer.global_config.location - # Partial resource name (i.e. "12345") with known project and location - if ( - not valid_name - and validate_project(user_project) - and validate_region(user_location) - and validate_id(resource_name) - ): - resource_name = f"projects/{user_project}/locations/{user_location}/{resource_noun}/{resource_name}" - # Invalid resource_name parameter - elif not valid_name: - raise ValueError(f"Please provide a valid {resource_noun[:-1]} name or ID") + validate_region(user_location) + validate_id(resource_name) - return resource_name + format_args = { + 'location': user_location, + 'project': user_project, + convert_camel_case_resource_noun_to_snake_case(resource_noun): resource_name, + } + if parent_resource_name_fields: + format_args.update(parent_resource_name_fields) -# TODO(b/172286889) validate resource noun -def validate_resource_noun(resource_noun: str) -> bool: - """Validates resource noun. + return format_resource_name_method(**format_args) - Args: - resource_noun: resource noun to validate - Returns: - bool: True if no errors raised - Raises: - ValueError: If resource noun not supported.
- """ - if resource_noun: - return True - raise ValueError("Please provide a valid resource noun") +def convert_camel_case_resource_noun_to_snake_case(resource_noun: str) -> str: + # camel to snake + snake_case = re.sub('([A-Z]+)', r'_\1', resource_noun).lower() -# TODO(b/172288287) validate project -def validate_project(project: str) -> bool: - """Validates project. + # plural to singular + return snake_case[:-1] if snake_case[-1] == 's' else snake_case - Args: - project: project to validate - Returns: - bool: True if no errors raised - Raises: - ValueError: If project does not exist. - """ - if project: - return True - raise ValueError("Please provide a valid project ID") - -# TODO(b/172932277) verify display name only contains utf-8 chars def validate_display_name(display_name: str): """Verify display name is at most 128 chars. @@ -434,6 +407,20 @@ def __getattr__(self, name: str) -> Any: def select_version(self, version: str) -> VertexAiServiceClient: return self._clients[version] + @classmethod + def get_gapic_client_class(cls, version: Optional[str] = None) -> Type[VertexAiServiceClient]: + """Gets the underyilng GAPIC client. + + Used to access class and static methods without instantiating. + + Args: + version (str): + Optional. Version of client to retreive otherwise the default version is returned. + Retuns: + Underlying GAPIC client for this wrapper and version. + """ + return dict(cls._version_map)[version or cls._default_version] + class DatasetClientWithOverride(ClientWithOverride): _is_temporary = True diff --git a/google/cloud/aiplatform/utils/console_utils.py b/google/cloud/aiplatform/utils/console_utils.py index ff9baba4cf..fd142f65b4 100644 --- a/google/cloud/aiplatform/utils/console_utils.py +++ b/google/cloud/aiplatform/utils/console_utils.py @@ -15,13 +15,16 @@ # limitations under the License. 
# +from google.cloud.aiplatform import jobs +from google.cloud.aiplatform import tensorboard from google.cloud.aiplatform import utils + def custom_job_console_uri(custom_job_resource_name: str) -> str: """Helper method to create console uri from custom job resource name.""" - fields = utils.extract_fields_from_resource_name(custom_job_resource_name) - return f"https://console.cloud.google.com/ai/platform/locations/{fields.location}/training/{fields.id}?project={fields.project}" + fields = jobs.CustomJob.parse_resource_name(custom_job_resource_name) + return f"https://console.cloud.google.com/ai/platform/locations/{fields['location']}/training/{fields['custom_job']}?project={fields['project']}" def custom_job_tensorboard_console_uri( @@ -29,7 +32,7 @@ def custom_job_tensorboard_console_uri( ) -> str: """Helper method to create console uri to tensorboard from custom job resource.""" # projects+40556267596+locations+us-central1+tensorboards+740208820004847616+experiments+2214368039829241856 - fields = utils.extract_fields_from_resource_name(tensorboard_resource_name) + fields = tensorboard.Tensorboard.parse_resource_name(tensorboard_resource_name) experiment_resource_name = f"{tensorboard_resource_name}/experiments/{custom_job_resource_name.split('/')[-1]}" uri_experiment_resource_name = experiment_resource_name.replace("/", "+") - return f"https://{fields.location}.tensorboard.googleusercontent.com/experiment/{uri_experiment_resource_name}" + return f"https://{fields['location']}.tensorboard.googleusercontent.com/experiment/{uri_experiment_resource_name}" diff --git a/tests/unit/aiplatform/test_jobs.py b/tests/unit/aiplatform/test_jobs.py index 9292ed27c3..107db7fb77 100644 --- a/tests/unit/aiplatform/test_jobs.py +++ b/tests/unit/aiplatform/test_jobs.py @@ -142,11 +142,11 @@ {"sampled_shapley_attribution": {"path_count": 10}} ) -_TEST_JOB_GET_METHOD_NAME = "get_fake_job" -_TEST_JOB_LIST_METHOD_NAME = "list_fake_job" -_TEST_JOB_CANCEL_METHOD_NAME = "cancel_fake_job" 
-_TEST_JOB_DELETE_METHOD_NAME = "delete_fake_job" -_TEST_JOB_RESOURCE_NAME = f"{_TEST_PARENT}/fakeJobs/{_TEST_ID}" +_TEST_JOB_GET_METHOD_NAME = "get_custom_job" +_TEST_JOB_LIST_METHOD_NAME = "list_custom_job" +_TEST_JOB_CANCEL_METHOD_NAME = "cancel_custom_job" +_TEST_JOB_DELETE_METHOD_NAME = "delete_custom_job" +_TEST_JOB_RESOURCE_NAME = f"{_TEST_PARENT}/customJobs/{_TEST_ID}" # TODO(b/171333554): Move reusable test fixtures to conftest.py file @@ -170,12 +170,13 @@ def fake_job_cancel_mock(): class TestJob: class FakeJob(jobs._Job): - _job_type = "fake-job" - _resource_noun = "fakeJobs" + _job_type = "custom-job" + _resource_noun = "customJobs" _getter_method = _TEST_JOB_GET_METHOD_NAME _list_method = _TEST_JOB_LIST_METHOD_NAME _cancel_method = _TEST_JOB_CANCEL_METHOD_NAME _delete_method = _TEST_JOB_DELETE_METHOD_NAME + _parse_resource_name_method = 'parse_custom_job_path' resource_name = _TEST_JOB_RESOURCE_NAME def setup_method(self): From 5072be3a763db2414dd9fc383210351cd7f49395 Mon Sep 17 00:00:00 2001 From: Sasha Sobran Date: Thu, 9 Dec 2021 10:56:40 -0500 Subject: [PATCH 02/11] refactor: Support nested resources in base class.
--- google/cloud/aiplatform/base.py | 41 +++-- google/cloud/aiplatform/datasets/dataset.py | 2 +- .../aiplatform/featurestore/entity_type.py | 50 +++--- .../cloud/aiplatform/featurestore/feature.py | 66 +++---- .../aiplatform/featurestore/featurestore.py | 18 +- google/cloud/aiplatform/jobs.py | 10 +- google/cloud/aiplatform/metadata/resource.py | 20 ++- google/cloud/aiplatform/models.py | 2 + google/cloud/aiplatform/pipeline_jobs.py | 2 +- .../tensorboard/tensorboard_resource.py | 5 +- google/cloud/aiplatform/training_jobs.py | 2 +- google/cloud/aiplatform/utils/__init__.py | 126 +++++++------ .../cloud/aiplatform/utils/console_utils.py | 6 +- .../aiplatform/utils/featurestore_utils.py | 96 +--------- .../services/prediction_service/client.py | 6 +- tests/unit/aiplatform/test_endpoints.py | 2 - tests/unit/aiplatform/test_featurestores.py | 96 ++-------- tests/unit/aiplatform/test_jobs.py | 3 +- tests/unit/aiplatform/test_models.py | 3 - tests/unit/aiplatform/test_utils.py | 165 +++++++----------- 20 files changed, 270 insertions(+), 451 deletions(-) diff --git a/google/cloud/aiplatform/base.py b/google/cloud/aiplatform/base.py index 30c535c61d..51bf3ec353 100644 --- a/google/cloud/aiplatform/base.py +++ b/google/cloud/aiplatform/base.py @@ -445,7 +445,10 @@ def _parse_resource_name_method(cls) -> str: def _format_resource_name_method(self) -> str: """Method name on GAPIC client to format a resource name.""" pass - + + # Override this value with staticmethod + # to use custom resource id validators per resource + _resource_id_validator: Optional[Callable[str, None]] = None def __init__( self, @@ -499,7 +502,7 @@ def _instantiate_client( ) @classmethod - def parse_resource_name(cls, resource_name: str) -> Dict[str, str]: + def _parse_resource_name(cls, resource_name: str) -> Dict[str, str]: """ Parses resource name into it's component segments. 
@@ -508,18 +511,18 @@ def parse_resource_name(cls, resource_name: str) -> Dict[str, str]: Returns: Dictionary of component segments. """ - # gets the underlying wrapped gapic client class - return getattr(cls.client_class.get_gapic_client_class(), cls._parse_resource_name_method)(resource_name) + return getattr( + cls.client_class.get_gapic_client_class(), cls._parse_resource_name_method + )(resource_name) @classmethod - def format_resource_name(cls, **kwargs: str) -> str: + def _format_resource_name(cls, **kwargs: str) -> str: """ Formats a resource name it's component segments. Args: **kwargs: Resource name parts. Singular and snake case. ie: - format_resource_name( project='my-project', location='us-central1' @@ -527,11 +530,10 @@ def format_resource_name(cls, **kwargs: str) -> str: Returns: Resource name. """ - # gets the underlying wrapped gapic client class - return getattr(cls.client_class.get_gapic_client_class(), cls._format_resource_name_method)(**kwargs) - - + return getattr( + cls.client_class.get_gapic_client_class(), cls._format_resource_name_method + )(**kwargs) def _get_and_validate_project_location( self, @@ -551,21 +553,24 @@ def _get_and_validate_project_location( RuntimeError if location is different from resource location """ - fields = self.parse_resource_name(resource_name) + fields = self._parse_resource_name(resource_name) if not fields: return project, location - if location and fields['location'] != location: + if location and fields["location"] != location: raise RuntimeError( f"location {location} is provided, but different from " f"the resource location {fields['location']}" ) - return fields['project'], fields['location'] - + return fields["project"], fields["location"] - def _get_gca_resource(self, resource_name: str) -> proto.Message: + def _get_gca_resource( + self, + resource_name: str, + parent_resource_name_fields: Optional[Dict[str, str]] = None, + ) -> proto.Message: """Returns GAPIC service representation of client class 
resource.""" """ Args: @@ -575,10 +580,12 @@ def _get_gca_resource(self, resource_name: str) -> proto.Message: resource_name = utils.full_resource_name( resource_name=resource_name, resource_noun=self._resource_noun, - parse_resource_name_method=self.parse_resource_name, - format_resource_name_method=self.format_resource_name, + parse_resource_name_method=self._parse_resource_name, + format_resource_name_method=self._format_resource_name, project=self.project, location=self.location, + parent_resource_name_fields=parent_resource_name_fields, + resource_id_validator=self._resource_id_validator, ) return getattr(self.api_client, self._getter_method)( diff --git a/google/cloud/aiplatform/datasets/dataset.py b/google/cloud/aiplatform/datasets/dataset.py index 256119c51e..cd077bb409 100644 --- a/google/cloud/aiplatform/datasets/dataset.py +++ b/google/cloud/aiplatform/datasets/dataset.py @@ -44,7 +44,7 @@ class _Dataset(base.VertexAiResourceNounWithFutureManager): _list_method = "list_datasets" _delete_method = "delete_dataset" _parse_resource_name_method = "parse_dataset_path" - _foramt_resource_name_method = "dataset_path" + _format_resource_name_method = "dataset_path" _supported_metadata_schema_uris: Tuple[str] = () diff --git a/google/cloud/aiplatform/featurestore/entity_type.py b/google/cloud/aiplatform/featurestore/entity_type.py index 327bf1931d..b46982dce0 100644 --- a/google/cloud/aiplatform/featurestore/entity_type.py +++ b/google/cloud/aiplatform/featurestore/entity_type.py @@ -36,10 +36,22 @@ class EntityType(base.VertexAiResourceNounWithFutureManager): client_class = utils.FeaturestoreClientWithOverride _is_client_prediction_client = False - _resource_noun = None + _resource_noun = "entityTypes" _getter_method = "get_entity_type" _list_method = "list_entity_types" _delete_method = "delete_entity_type" + _parse_resource_name_method = "parse_entity_type_path" + _format_resource_name_method = "entity_type_path" + + @staticmethod + def 
_resource_id_validator(resource_id): + """Validates resource ID. + + Args: + resource_id(str): + The resource id to validate. + """ + featurestore_utils.validate_id(resource_id) def __init__( self, @@ -81,31 +93,26 @@ def __init__( credentials set in aiplatform.init. """ - ( - featurestore_id, - _, - ) = featurestore_utils.validate_and_get_entity_type_resource_ids( - entity_type_name=entity_type_name, featurestore_id=featurestore_id - ) - - # TODO(b/208269923): Temporary workaround, update when base class supports nested resource - self._resource_noun = f"featurestores/{featurestore_id}/entityTypes" - super().__init__( project=project, location=location, credentials=credentials, resource_name=entity_type_name, ) - self._gca_resource = self._get_gca_resource(resource_name=entity_type_name) + self._gca_resource = self._get_gca_resource( + resource_name=entity_type_name, + parent_resource_name_fields={ + featurestore.Featurestore._resource_noun: featurestore_id + } + if featurestore_id + else featurestore_id, + ) @property def featurestore_name(self) -> str: """Full qualified resource name of the managed featurestore in which this EntityType is.""" - entity_type_name_components = featurestore_utils.CompatFeaturestoreServiceClient.parse_entity_type_path( - path=self.resource_name - ) - return featurestore_utils.CompatFeaturestoreServiceClient.featurestore_path( + entity_type_name_components = self._parse_resource_name(self.resource_name) + return featurestore.Featurestore._format_resource_name( project=entity_type_name_components["project"], location=entity_type_name_components["location"], featurestore=entity_type_name_components["featurestore"], @@ -128,12 +135,10 @@ def get_feature(self, feature_id: str) -> "featurestore.Feature": Returns: featurestore.Feature - The managed feature resource object. 
""" - entity_type_name_components = featurestore_utils.CompatFeaturestoreServiceClient.parse_entity_type_path( - path=self.resource_name - ) + entity_type_name_components = self._parse_resource_name(self.resource_name) return featurestore.Feature( - feature_name=featurestore_utils.CompatFeaturestoreServiceClient.feature_path( + feature_name=featurestore.Feature._format_resource_name( project=entity_type_name_components["project"], location=entity_type_name_components["location"], featurestore=entity_type_name_components["featurestore"], @@ -299,9 +304,12 @@ def list( credentials=credentials, parent=utils.full_resource_name( resource_name=featurestore_name, - resource_noun="featurestores", + resource_noun=featurestore.Featurestore._resource_noun, + parse_resource_name_method=featurestore.Featurestore._parse_resource_name, + format_resource_name_method=featurestore.Featurestore._format_resource_name, project=project, location=location, + resource_id_validator=cls._resource_id_validator, ), ) diff --git a/google/cloud/aiplatform/featurestore/feature.py b/google/cloud/aiplatform/featurestore/feature.py index ab199d0c57..5dbbe67cfd 100644 --- a/google/cloud/aiplatform/featurestore/feature.py +++ b/google/cloud/aiplatform/featurestore/feature.py @@ -36,10 +36,22 @@ class Feature(base.VertexAiResourceNounWithFutureManager): client_class = utils.FeaturestoreClientWithOverride _is_client_prediction_client = False - _resource_noun = None + _resource_noun = "features" _getter_method = "get_feature" _list_method = "list_features" _delete_method = "delete_feature" + _parse_resource_name_method = "parse_feature_path" + _format_resource_name_method = "feature_path" + + @staticmethod + def _resource_id_validator(resource_id): + """Validates resource ID. + + Args: + resource_id(str): + The resource id to validate. + """ + featurestore_utils.validate_id(resource_id) def __init__( self, @@ -84,20 +96,6 @@ def __init__( Optional. Custom credentials to use to retrieve this Feature. 
Overrides credentials set in aiplatform.init. """ - ( - featurestore_id, - entity_type_id, - _, - ) = featurestore_utils.validate_and_get_feature_resource_ids( - feature_name=feature_name, - entity_type_id=entity_type_id, - featurestore_id=featurestore_id, - ) - - # TODO(b/208269923): Temporary workaround, update when base class supports nested resource - self._resource_noun = ( - f"featurestores/{featurestore_id}/entityTypes/{entity_type_id}/features" - ) super().__init__( project=project, @@ -105,16 +103,22 @@ def __init__( credentials=credentials, resource_name=feature_name, ) - self._gca_resource = self._get_gca_resource(resource_name=feature_name) + self._gca_resource = self._get_gca_resource( + resource_name=feature_name, + parent_resource_name_fields={ + featurestore.Featurestore._resource_noun: featurestore_id, + featurestore.EntityType._resource_noun: entity_type_id, + } + if featurestore_id + else featurestore_id, + ) @property def featurestore_name(self) -> str: """Full qualified resource name of the managed featurestore in which this Feature is.""" - feature_path_components = featurestore_utils.CompatFeaturestoreServiceClient.parse_feature_path( - path=self.resource_name - ) + feature_path_components = self._parse_resource_name(self.resource_name) - return featurestore_utils.CompatFeaturestoreServiceClient.featurestore_path( + return featurestore.Featurestore._format_resource_name( project=feature_path_components["project"], location=feature_path_components["location"], featurestore=feature_path_components["featurestore"], @@ -131,11 +135,9 @@ def get_featurestore(self) -> "featurestore.Featurestore": @property def entity_type_name(self) -> str: """Full qualified resource name of the managed entityType in which this Feature is.""" - feature_path_components = featurestore_utils.CompatFeaturestoreServiceClient.parse_feature_path( - path=self.resource_name - ) + feature_path_components = self._parse_resource_name(self.resource_name) - return 
featurestore_utils.CompatFeaturestoreServiceClient.entity_type_path( + return featurestore.EntityType._format_resource_name( project=feature_path_components["project"], location=feature_path_components["location"], featurestore=feature_path_components["featurestore"], @@ -303,12 +305,6 @@ def list( Returns: List[Feature] - A list of managed feature resource objects """ - ( - featurestore_id, - entity_type_id, - ) = featurestore_utils.validate_and_get_entity_type_resource_ids( - entity_type_name=entity_type_name, featurestore_id=featurestore_id, - ) return cls._list( filter=filter, @@ -318,9 +314,17 @@ def list( credentials=credentials, parent=utils.full_resource_name( resource_name=entity_type_name, - resource_noun=f"featurestores/{featurestore_id}/entityTypes", + resource_noun=featurestore.EntityType._resource_noun, + parse_resource_name_method=featurestore.EntityType._parse_resource_name, + format_resource_name_method=featurestore.EntityType._format_resource_name, + parent_resource_name_fields={ + featurestore.Featurestore._resource_noun: featurestore_id + } + if featurestore_id + else featurestore_id, project=project, location=location, + resource_id_validator=cls._resource_id_validator, ), ) diff --git a/google/cloud/aiplatform/featurestore/featurestore.py b/google/cloud/aiplatform/featurestore/featurestore.py index d3bb0a0c11..37ee5f94be 100644 --- a/google/cloud/aiplatform/featurestore/featurestore.py +++ b/google/cloud/aiplatform/featurestore/featurestore.py @@ -39,6 +39,18 @@ class Featurestore(base.VertexAiResourceNounWithFutureManager): _getter_method = "get_featurestore" _list_method = "list_featurestores" _delete_method = "delete_featurestore" + _parse_resource_name_method = "parse_featurestore_path" + _format_resource_name_method = "featurestore_path" + + @staticmethod + def _resource_id_validator(resource_id): + """Validates resource ID. + + Args: + resource_id(str): + The resource id to validate. 
+ """ + featurestore_utils.validate_id(resource_id) def __init__( self, @@ -92,12 +104,10 @@ def get_entity_type(self, entity_type_id: str) -> "featurestore.EntityType": Returns: featurestore.EntityType - The managed entityType resource object. """ - featurestore_name_components = featurestore_utils.CompatFeaturestoreServiceClient.parse_featurestore_path( - path=self.resource_name - ) + featurestore_name_components = self._parse_resource_name(self.resource_name) return featurestore.EntityType( - entity_type_name=featurestore_utils.CompatFeaturestoreServiceClient.entity_type_path( + entity_type_name=featurestore.EntityType._format_resource_name( project=featurestore_name_components["project"], location=featurestore_name_components["location"], featurestore=featurestore_name_components["featurestore"], diff --git a/google/cloud/aiplatform/jobs.py b/google/cloud/aiplatform/jobs.py index 4cc472c886..308242033a 100644 --- a/google/cloud/aiplatform/jobs.py +++ b/google/cloud/aiplatform/jobs.py @@ -166,9 +166,9 @@ def _cancel_method(cls) -> str: def _dashboard_uri(self) -> Optional[str]: """Helper method to compose the dashboard uri where job can be viewed.""" - fields = self.parse_resource_name(self.resource_name) - location = fields.pop('location') - project = fields.pop('project') + fields = self._parse_resource_name(self.resource_name) + location = fields.pop("location") + project = fields.pop("project") job = list(fields.values())[0] url = f"https://console.cloud.google.com/ai/platform/locations/{location}/{self._job_type}/{job}?project={project}" return url @@ -536,8 +536,8 @@ def create( model_name = utils.full_resource_name( resource_name=model_name, resource_noun="models", - parse_resource_name_method=aiplatform.Model.parse_resource_name, - format_resource_name_method=aiplatform.Model.format_resource_name, + parse_resource_name_method=aiplatform.Model._parse_resource_name, + format_resource_name_method=aiplatform.Model._format_resource_name, project=project, 
location=location, ) diff --git a/google/cloud/aiplatform/metadata/resource.py b/google/cloud/aiplatform/metadata/resource.py index 6a6c6175b8..224748acb8 100644 --- a/google/cloud/aiplatform/metadata/resource.py +++ b/google/cloud/aiplatform/metadata/resource.py @@ -26,6 +26,7 @@ from google.auth import credentials as auth_credentials from google.cloud.aiplatform import base, initializer +from google.cloud.aiplatform import metadata from google.cloud.aiplatform import utils from google.cloud.aiplatform.compat.types import artifact as gca_artifact from google.cloud.aiplatform.compat.types import context as gca_context @@ -80,21 +81,22 @@ def __init__( if resource: self._gca_resource = resource - return - - full_resource_name = resource_name - # Construct the full_resource_name if input resource_name is the resource_id - if "/" not in resource_name: + else: full_resource_name = utils.full_resource_name( resource_name=resource_name, - resource_noun=f"metadataStores/{metadata_store_id}/{self._resource_noun}", + resource_noun=self._resource_noun, + parse_resource_name_method=self._parse_resource_name, + format_resource_name_method=self._format_resource_name, + parent_resource_name_fields={ + metadata.metadata_store._MetadataStore._resource_noun: metadata_store_id + }, project=self.project, location=self.location, ) - self._gca_resource = getattr(self.api_client, self._getter_method)( - name=full_resource_name, retry=base._DEFAULT_RETRY - ) + self._gca_resource = getattr(self.api_client, self._getter_method)( + name=full_resource_name, retry=base._DEFAULT_RETRY + ) @property def metadata(self) -> Dict: diff --git a/google/cloud/aiplatform/models.py b/google/cloud/aiplatform/models.py index 03ecbfb9a4..2240837f51 100644 --- a/google/cloud/aiplatform/models.py +++ b/google/cloud/aiplatform/models.py @@ -129,6 +129,8 @@ def __init__( endpoint_name = utils.full_resource_name( resource_name=endpoint_name, resource_noun="endpoints", + 
parse_resource_name_method=self._parse_resource_name, + format_resource_name_method=self._format_resource_name, project=project, location=location, ) diff --git a/google/cloud/aiplatform/pipeline_jobs.py b/google/cloud/aiplatform/pipeline_jobs.py index bcacf08939..c756589513 100644 --- a/google/cloud/aiplatform/pipeline_jobs.py +++ b/google/cloud/aiplatform/pipeline_jobs.py @@ -315,7 +315,7 @@ def has_failed(self) -> bool: def _dashboard_uri(self) -> str: """Helper method to compose the dashboard uri where pipeline can be viewed.""" - fields = self.parse_resource_name(self.resource_name) + fields = self._parse_resource_name(self.resource_name) url = f"https://console.cloud.google.com/vertex-ai/locations/{fields['location']}/pipelines/runs/{fields['pipeline_job']}?project={fields['project']}" return url diff --git a/google/cloud/aiplatform/tensorboard/tensorboard_resource.py b/google/cloud/aiplatform/tensorboard/tensorboard_resource.py index 8381319ecc..3ee6901ccc 100644 --- a/google/cloud/aiplatform/tensorboard/tensorboard_resource.py +++ b/google/cloud/aiplatform/tensorboard/tensorboard_resource.py @@ -27,11 +27,14 @@ _LOGGER = base.Logger(__name__) + class _TensorboardServiceResource(base.VertexAiResourceNounWithFutureManager): client_class = utils.TensorboardClientWithOverride + class Tensorboard(_TensorboardServiceResource): """Managed tensorboard resource for Vertex AI.""" + _resource_noun = "tensorboards" _getter_method = "get_tensorboard" _list_method = "list_tensorboards" @@ -286,6 +289,7 @@ def update( class TensorboardExperiment(_TensorboardServiceResource): """Managed tensorboard experiment resource for Vertex AI.""" + _resource_noun = "experiments" _getter_method = "get_tensorboard_experiment" _list_method = "list_tensorboard_experiment" @@ -328,7 +332,6 @@ def __init__( resource_name=tensorboard_experiment_name, ) - self._gca_resource = self._get_gca_resource(resource_name=tensorboard_name) @classmethod diff --git 
a/google/cloud/aiplatform/training_jobs.py b/google/cloud/aiplatform/training_jobs.py index a9bd8f1da6..17be4c2d7a 100644 --- a/google/cloud/aiplatform/training_jobs.py +++ b/google/cloud/aiplatform/training_jobs.py @@ -911,7 +911,7 @@ def has_failed(self) -> bool: def _dashboard_uri(self) -> str: """Helper method to compose the dashboard uri where training can be viewed.""" - fields = self.parse_resource_name(self.resource_name) + fields = self._parse_resource_name(self.resource_name) url = f"https://console.cloud.google.com/ai/platform/locations/{fields['location']}/training/{fields['training_pipeline']}?project={fields['project']}" return url diff --git a/google/cloud/aiplatform/utils/__init__.py b/google/cloud/aiplatform/utils/__init__.py index b7525fa4f1..9d2e28caf9 100644 --- a/google/cloud/aiplatform/utils/__init__.py +++ b/google/cloud/aiplatform/utils/__init__.py @@ -19,10 +19,9 @@ import abc import datetime import pathlib -from collections import namedtuple import logging import re -from typing import Any, Callable, Dict, Match, Optional, Type, TypeVar, Tuple +from typing import Any, Callable, Dict, Optional, Type, TypeVar, Tuple from google.api_core import client_options from google.api_core import gapic_v1 @@ -88,74 +87,32 @@ tensorboard_service_client_v1.TensorboardServiceClient, ) -RESOURCE_NAME_PATTERN = re.compile( - r"^projects\/(?P[\w-]+)\/locations\/(?P[\w-]+)\/(?P[\w\-\/]+)\/(?P[\w-]+)$" -) -RESOURCE_ID_PATTERN = re.compile(r"^[\w-]+$") - -Fields = namedtuple("Fields", ["project", "location", "resource", "id"],) - - -def _match_to_fields(match: Match) -> Optional[Fields]: - """Normalize RegEx groups from resource name pattern Match to class - Fields.""" - if not match: - return None - - return Fields( - project=match["project"], - location=match["location"], - resource=match["resource"], - id=match["id"], - ) - -def validate_id(resource_id: str) -> bool: - """Validate int64 resource ID number.""" - return 
bool(RESOURCE_ID_PATTERN.match(resource_id)) +RESOURCE_ID_PATTERN = re.compile(r"^[\w-]+$") -def extract_fields_from_resource_name( - resource_name: str, resource_noun: Optional[str] = None -) -> Optional[Fields]: - """Validates and returns extracted fields from a fully-qualified resource - name. Returns None if name is invalid. +def validate_id(resource_id: str): + """Validate resource ID. Args: - resource_name (str): - Required. A fully-qualified Vertex AI resource name + resource_id (str): Resource id. + Raises: + ValueError: If resource id is not a valid format. - resource_noun (str): - A resource noun to validate the resource name against. - For example, you would pass "datasets" to validate - "projects/123/locations/us-central1/datasets/456". - In the case of deeper naming structures, e.g., - "projects/123/locations/us-central1/metadataStores/123/contexts/456", - you would pass "metadataStores/123/contexts" as the resource_noun. - Returns: - fields (Fields): - A named tuple containing four extracted fields from a resource name: - project, location, resource, and id. These fields can be used for - subsequent method calls in the SDK. """ - fields = _match_to_fields(RESOURCE_NAME_PATTERN.match(resource_name)) - - if not fields: - return None - if resource_noun and fields.resource != resource_noun: - return None - - return fields + if not RESOURCE_ID_PATTERN.match(resource_id): + raise ValueError(f"Resource {resource_id} is not a valid resource name or id.") def full_resource_name( resource_name: str, resource_noun: str, - parse_resource_name_method: Callable[[str], Dict[str,str]], + parse_resource_name_method: Callable[[str], Dict[str, str]], format_resource_name_method: Callable[..., str], parent_resource_name_fields: Optional[Dict[str, str]] = None, project: Optional[str] = None, location: Optional[str] = None, + resource_id_validator: Optional[Callable[[str], None]] = None, ) -> str: """Returns fully qualified resource name. 
@@ -164,18 +121,30 @@ def full_resource_name( Required. A fully-qualified Vertex AI resource name or resource ID. resource_noun (str): - A resource noun to validate the resource name against. + Required. A resource noun to validate the resource name against. For example, you would pass "datasets" to validate "projects/123/locations/us-central1/datasets/456". In the case of deeper naming structures, e.g., "projects/123/locations/us-central1/metadataStores/123/contexts/456", you would pass "metadataStores/123/contexts" as the resource_noun. + parse_resource_name_method (Callable[[str], Dict[str,str]]): + Required. Method that parses the resource name into its segment parts. + These are generally included with GAPIC clients. + format_resource_name_method (Callable[..., str]): + Required. Method that takes segment parts of resource names and returns + the formatted resource name. These are generally included with GAPIC clients. + parent_resource_name_fields (Dict[str, str]): + Optional. Dictionary of segment parts where key is the resource noun and + values are the resource ids. project (str): - Optional project to retrieve resource_noun from. If not set, project + Optional. project to retrieve resource_noun from. If not set, project set in aiplatform.init will be used. location (str): - Optional location to retrieve resource_noun from. If not set, location + Optional. location to retrieve resource_noun from. If not set, location set in aiplatform.init will be used. + resource_id_validator (Callable[[str], None]): + Optional. Function that validates the resource ID. Overrides the default validator, validate_id. + Should take a resource ID as string and raise ValueError if invalid. 
Returns: resource_name (str): @@ -187,30 +156,51 @@ def full_resource_name( if fields: return resource_name + resource_id_validator = resource_id_validator or validate_id + user_project = project or initializer.global_config.project user_location = location or initializer.global_config.location validate_region(user_location) - validate_id(resource_name) + resource_id_validator(resource_name) format_args = { - 'location': user_location, - 'project': user_project, + "location": user_location, + "project": user_project, convert_camel_case_resource_noun_to_snake_case(resource_noun): resource_name, } if parent_resource_name_fields: - format_args.update(parent_resource_name_fields) + format_args.update( + { + convert_camel_case_resource_noun_to_snake_case(key): value + for key, value in parent_resource_name_fields.items() + } + ) + + return format_resource_name_method(**format_args) - return format_resource_name_method(**parent_resource_name_fields) + +# Resource nouns that are not plural in their resource names. +# Used below to avoid conversion from plural to singular. +_SINGULAR_RESOURCE_NOUNS = {"time_series"} def convert_camel_case_resource_noun_to_snake_case(resource_noun: str) -> str: - # camel to snake - snake_case = re.sub('([A-Z]+)', r'_\1', resource_noun).lower() + """Converts camel case to snake case to map resource name parts to GAPIC parameter names. + + Args: + resource_noun (str): The resource noun in camel case to convert. + Returns: + Singular snake case resource noun. 
+ """ + snake_case = re.sub("([A-Z]+)", r"_\1", resource_noun).lower() # plural to singular - return snake_case[:-1] if snake_case[-1] == 's' else snake_case + if snake_case in _SINGULAR_RESOURCE_NOUNS or not snake_case.endswith("s"): + return snake_case + else: + return snake_case[:-1] def validate_display_name(display_name: str): @@ -416,10 +406,12 @@ def select_version(self, version: str) -> VertexAiServiceClient: return self._clients[version] @classmethod - def get_gapic_client_class(cls, version: Optional[str] = None) -> Type[VertexAiServiceClient]: + def get_gapic_client_class( + cls, version: Optional[str] = None + ) -> Type[VertexAiServiceClient]: """Gets the underyilng GAPIC client. - Used to access class and static methods without instantiating. + Used to access class and static methods without instantiating. Args: version (str): diff --git a/google/cloud/aiplatform/utils/console_utils.py b/google/cloud/aiplatform/utils/console_utils.py index fd142f65b4..c108b0605e 100644 --- a/google/cloud/aiplatform/utils/console_utils.py +++ b/google/cloud/aiplatform/utils/console_utils.py @@ -17,13 +17,11 @@ from google.cloud.aiplatform import jobs from google.cloud.aiplatform import tensorboard -from google.cloud.aiplatform import utils - def custom_job_console_uri(custom_job_resource_name: str) -> str: """Helper method to create console uri from custom job resource name.""" - fields = jobs.CustomJob.parse_resource_name(custom_job_resource_name) + fields = jobs.CustomJob._parse_resource_name(custom_job_resource_name) return f"https://console.cloud.google.com/ai/platform/locations/{fields['location']}/training/{fields['custom_job']}?project={fields['project']}" @@ -32,7 +30,7 @@ def custom_job_tensorboard_console_uri( ) -> str: """Helper method to create console uri to tensorboard from custom job resource.""" # projects+40556267596+locations+us-central1+tensorboards+740208820004847616+experiments+2214368039829241856 - fields = 
tensorboard.Tensorboard.parse_resource_name(tensorboard_resource_name) + fields = tensorboard.Tensorboard._parse_resource_name(tensorboard_resource_name) experiment_resource_name = f"{tensorboard_resource_name}/experiments/{custom_job_resource_name.split('/')[-1]}" uri_experiment_resource_name = experiment_resource_name.replace("/", "+") return f"https://{fields['location']}.tensorboard.googleusercontent.com/experiment/{uri_experiment_resource_name}" diff --git a/google/cloud/aiplatform/utils/featurestore_utils.py b/google/cloud/aiplatform/utils/featurestore_utils.py index c78a96d185..fedcc37568 100644 --- a/google/cloud/aiplatform/utils/featurestore_utils.py +++ b/google/cloud/aiplatform/utils/featurestore_utils.py @@ -16,102 +16,12 @@ # import re -from typing import Optional, Tuple -from google.cloud.aiplatform.compat.services import featurestore_service_client - -CompatFeaturestoreServiceClient = featurestore_service_client.FeaturestoreServiceClient RESOURCE_ID_PATTERN_REGEX = r"[a-z_][a-z0-9_]{0,59}" -def validate_id(resource_id: str) -> bool: +def validate_id(resource_id: str): """Validates feature store resource ID pattern.""" - return bool(re.compile(r"^" + RESOURCE_ID_PATTERN_REGEX + r"$").match(resource_id)) - - -def validate_and_get_entity_type_resource_ids( - entity_type_name: str, featurestore_id: Optional[str] = None, -) -> Tuple[str, str]: - """Validates and gets featurestore ID and entity_type ID of the entity_type resource. - - Args: - entity_type_name (str): - Required. A fully-qualified entityType resource name or an entity_type ID - Example: "projects/123/locations/us-central1/featurestores/my_featurestore_id/entityTypes/my_entity_type_id" - or "my_entity_type_id", with featurestore_id passed. - featurestore_id (str): - Optional. Featurestore ID of the entity_type resource. 
- - Returns: - Tuple[str, str] - featurestore ID and entity_type ID - - Raises: - ValueError if the provided entity_type_name is not in form of a fully-qualified - entityType resource name nor an entity_type ID with featurestore_id passed. - """ - match = CompatFeaturestoreServiceClient.parse_entity_type_path( - path=entity_type_name - ) - - if match: - featurestore_id = match["featurestore"] - entity_type_id = match["entity_type"] - elif ( - validate_id(entity_type_name) - and featurestore_id - and validate_id(featurestore_id) - ): - entity_type_id = entity_type_name - else: - raise ValueError( - f"{entity_type_name} is not in form of a fully-qualified entityType resource name " - f"nor an entity_type ID with featurestore_id passed." - ) - return (featurestore_id, entity_type_id) - - -def validate_and_get_feature_resource_ids( - feature_name: str, - featurestore_id: Optional[str] = None, - entity_type_id: Optional[str] = None, -) -> Tuple[str, str, str]: - """Validates and gets featurestore ID, entity_type ID, and feature ID for the feature resource. - Args: - feature_name (str): - Required. A fully-qualified feature resource name or a feature ID. - Example: "projects/123/locations/us-central1/featurestores/my_featurestore_id/entityTypes/my_entity_type_id/features/my_feature_id" - or "my_feature_id" when project and location are initialized or passed, with featurestore_id and entity_type_id passed. - featurestore_id (str): - Optional. Featurestore ID of the feature resource. - entity_type_id (str): - Optional. EntityType ID of the feature resource. - - Returns: - Tuple[str, str, str] - featurestore ID, entity_type ID, and feature ID - - Raises: - ValueError if the provided feature_name is not in form of a fully-qualified - feature resource name nor a feature ID with featurestore_id and entity_type_id passed. 
- """ - - match = CompatFeaturestoreServiceClient.parse_feature_path(path=feature_name) - - if match: - featurestore_id = match["featurestore"] - entity_type_id = match["entity_type"] - feature_id = match["feature"] - elif ( - validate_id(feature_name) - and featurestore_id - and entity_type_id - and validate_id(featurestore_id) - and validate_id(entity_type_id) - ): - feature_id = feature_name - else: - raise ValueError( - f"{feature_name} is not in form of a fully-qualified feature resource name " - f"nor a feature ID with featurestore_id and entity_type_id passed." - ) - return (featurestore_id, entity_type_id, feature_id) + if not re.compile(r"^" + RESOURCE_ID_PATTERN_REGEX + r"$").match(resource_id): + raise ValueError("Resource ID {resource_id} is not a valied resource id.") diff --git a/google/cloud/aiplatform_v1/services/prediction_service/client.py b/google/cloud/aiplatform_v1/services/prediction_service/client.py index 07834c79d6..e3cb2c5a53 100644 --- a/google/cloud/aiplatform_v1/services/prediction_service/client.py +++ b/google/cloud/aiplatform_v1/services/prediction_service/client.py @@ -390,7 +390,7 @@ def predict( timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> prediction_service.PredictResponse: - r"""Perform an online prediction. + """Perform an online prediction. Args: request (Union[google.cloud.aiplatform_v1.types.PredictRequest, dict]): @@ -494,7 +494,7 @@ def raw_predict( timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> httpbody_pb2.HttpBody: - r"""Perform an online prediction with an arbitrary HTTP payload. + """Perform an online prediction with an arbitrary HTTP payload. The response includes the following HTTP headers: @@ -651,7 +651,7 @@ def explain( timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> prediction_service.ExplainResponse: - r"""Perform an online explanation. + """Perform an online explanation. 
If [deployed_model_id][google.cloud.aiplatform.v1.ExplainRequest.deployed_model_id] diff --git a/tests/unit/aiplatform/test_endpoints.py b/tests/unit/aiplatform/test_endpoints.py index 4ec527e31e..4d64cff74e 100644 --- a/tests/unit/aiplatform/test_endpoints.py +++ b/tests/unit/aiplatform/test_endpoints.py @@ -378,7 +378,6 @@ def test_constructor(self, create_endpoint_client_mock): client_class=utils.EndpointClientWithOverride, credentials=initializer.global_config.credentials, location_override=_TEST_LOCATION, - prediction_client=False, ), mock.call( client_class=utils.PredictionClientWithOverride, @@ -464,7 +463,6 @@ def test_constructor_with_custom_credentials(self, create_endpoint_client_mock): client_class=utils.EndpointClientWithOverride, credentials=creds, location_override=_TEST_LOCATION, - prediction_client=False, ), mock.call( client_class=utils.PredictionClientWithOverride, diff --git a/tests/unit/aiplatform/test_featurestores.py b/tests/unit/aiplatform/test_featurestores.py index 4cede4ba09..8be6a52277 100644 --- a/tests/unit/aiplatform/test_featurestores.py +++ b/tests/unit/aiplatform/test_featurestores.py @@ -27,13 +27,10 @@ from google.cloud import aiplatform from google.cloud.aiplatform import base from google.cloud.aiplatform import initializer - from google.cloud.aiplatform.utils import featurestore_utils - from google.cloud.aiplatform_v1.services.featurestore_service import ( client as featurestore_service_client, ) - from google.cloud.aiplatform_v1.types import ( featurestore as gca_featurestore, entity_type as gca_entity_type, @@ -255,94 +252,25 @@ def delete_feature_mock(): class TestFeaturestoreUtils: @pytest.mark.parametrize( - "resource_id, expected", - [ - ("resource_id", True), - ("resource_id12345", True), - ("12345resource_id", False), - ("_resource_id", True), - ("resource_id/1234", False), - ("_resource_id/1234", False), - ("resource-id-1234", False), - ("123456", False), - ("c" * 61, False), - ("_123456", True), - ], + "resource_id", 
["resource_id", "resource_id12345", "_resource_id", "_123456"], ) - def test_validate_resource_id(self, resource_id: str, expected: bool): - assert expected == featurestore_utils.validate_id(resource_id) + def test_validate_resource_id(self, resource_id: str): + featurestore_utils.validate_id(resource_id) @pytest.mark.parametrize( - "feature_name, featurestore_id, entity_type_id", + "resource_id", [ - (_TEST_FEATURE_NAME, None, None,), - (_TEST_FEATURE_ID, _TEST_FEATURESTORE_ID, _TEST_ENTITY_TYPE_ID,), + "12345resource_id", + "resource_id/1234", + "_resource_id/1234", + "resource-id-1234", + "123456", + "c" * 61, ], ) - def test_validate_and_get_feature_resource_ids( - self, feature_name: str, featurestore_id: str, entity_type_id: str, - ): - assert ( - _TEST_FEATURESTORE_ID, - _TEST_ENTITY_TYPE_ID, - _TEST_FEATURE_ID, - ) == featurestore_utils.validate_and_get_feature_resource_ids( - feature_name=feature_name, - featurestore_id=featurestore_id, - entity_type_id=entity_type_id, - ) - - @pytest.mark.parametrize( - "feature_name, featurestore_id, entity_type_id", - [ - (_TEST_FEATURE_INVALID, None, None,), - (_TEST_FEATURE_ID, None, _TEST_ENTITY_TYPE_ID,), - (_TEST_FEATURE_ID, None, None,), - (_TEST_FEATURE_ID, _TEST_FEATURESTORE_NAME, None,), - ], - ) - def test_validate_and_get_feature_resource_ids_with_raise( - self, feature_name: str, featurestore_id: str, entity_type_id: str, - ): - with pytest.raises(ValueError): - featurestore_utils.validate_and_get_feature_resource_ids( - feature_name=feature_name, - featurestore_id=featurestore_id, - entity_type_id=entity_type_id, - ) - - @pytest.mark.parametrize( - "entity_type_name, featurestore_id", - [ - (_TEST_ENTITY_TYPE_NAME, None,), - (_TEST_ENTITY_TYPE_ID, _TEST_FEATURESTORE_ID,), - ], - ) - def test_validate_and_get_entity_type_resource_ids( - self, entity_type_name: str, featurestore_id: str - ): - assert ( - _TEST_FEATURESTORE_ID, - _TEST_ENTITY_TYPE_ID, - ) == 
featurestore_utils.validate_and_get_entity_type_resource_ids( - entity_type_name=entity_type_name, featurestore_id=featurestore_id - ) - - @pytest.mark.parametrize( - "entity_type_name, featurestore_id", - [ - (_TEST_ENTITY_TYPE_INVALID, None,), - (_TEST_ENTITY_TYPE_ID, None,), - (_TEST_ENTITY_TYPE_ID, _TEST_FEATURESTORE_NAME,), - ], - ) - def test_validate_and_get_entity_type_resource_ids_with_raise( - self, entity_type_name: str, featurestore_id: str, - ): + def test_validate_invalid_resource_id(self, resource_id: str): with pytest.raises(ValueError): - featurestore_utils.validate_and_get_entity_type_resource_ids( - entity_type_name=entity_type_name, featurestore_id=featurestore_id - ) + featurestore_utils.validate_id(resource_id) class TestFeaturestore: diff --git a/tests/unit/aiplatform/test_jobs.py b/tests/unit/aiplatform/test_jobs.py index 107db7fb77..311c418b13 100644 --- a/tests/unit/aiplatform/test_jobs.py +++ b/tests/unit/aiplatform/test_jobs.py @@ -176,7 +176,8 @@ class FakeJob(jobs._Job): _list_method = _TEST_JOB_LIST_METHOD_NAME _cancel_method = _TEST_JOB_CANCEL_METHOD_NAME _delete_method = _TEST_JOB_DELETE_METHOD_NAME - _parse_resource_name_method = 'parse_custom_job_path' + _parse_resource_name_method = "parse_custom_job_path" + _format_resource_name_method = "custom_job_path" resource_name = _TEST_JOB_RESOURCE_NAME def setup_method(self): diff --git a/tests/unit/aiplatform/test_models.py b/tests/unit/aiplatform/test_models.py index 177cacfb55..bf87f3593d 100644 --- a/tests/unit/aiplatform/test_models.py +++ b/tests/unit/aiplatform/test_models.py @@ -454,7 +454,6 @@ def test_constructor_creates_client(self, create_client_mock): client_class=utils.ModelClientWithOverride, credentials=initializer.global_config.credentials, location_override=_TEST_LOCATION, - prediction_client=False, ) def test_constructor_create_client_with_custom_location(self, create_client_mock): @@ -468,7 +467,6 @@ def test_constructor_create_client_with_custom_location(self, 
create_client_mock client_class=utils.ModelClientWithOverride, credentials=initializer.global_config.credentials, location_override=_TEST_LOCATION_2, - prediction_client=False, ) def test_constructor_creates_client_with_custom_credentials( @@ -480,7 +478,6 @@ def test_constructor_creates_client_with_custom_credentials( client_class=utils.ModelClientWithOverride, credentials=creds, location_override=_TEST_LOCATION, - prediction_client=False, ) def test_constructor_gets_model(self, get_model_mock): diff --git a/tests/unit/aiplatform/test_utils.py b/tests/unit/aiplatform/test_utils.py index 928b01a889..bd91fced28 100644 --- a/tests/unit/aiplatform/test_utils.py +++ b/tests/unit/aiplatform/test_utils.py @@ -17,10 +17,7 @@ import pytest -from uuid import uuid4 -from random import choice -from random import randint -from string import ascii_letters +from typing import Callable, Dict, Optional from google.api_core import client_options from google.api_core import gapic_v1 @@ -40,97 +37,6 @@ model_service_client_default = model_service_client_v1 -@pytest.mark.parametrize( - "resource_name, expected", - [ - ("projects/123456/locations/us-central1/datasets/987654", True), - ("projects/857392/locations/us-central1/trainingPipelines/347292", True), - ("projects/acme-co-proj-1/locations/us-central1/datasets/123456", True), - ("projects/acme-co-proj-1/locations/us-central1/datasets/abcdef", True), - ("projects/acme-co-proj-1/locations/us-central1/datasets/abc-def", True), - ("project/123456/locations/us-central1/datasets/987654", False), - ("project//locations//datasets/987654", False), - ("locations/europe-west4/datasets/987654", False), - ("987654", False), - ], -) -def test_extract_fields_from_resource_name(resource_name: str, expected: bool): - # Given a resource name and expected validity, test extract_fields_from_resource_name() - assert expected == bool(utils.extract_fields_from_resource_name(resource_name)) - - -@pytest.fixture -def generated_resource_fields(): - 
generated_fields = utils.Fields( - project=str(uuid4()), - location=str(uuid4()), - resource="".join(choice(ascii_letters) for i in range(10)), # 10 random letters - id=str(randint(0, 100000)), - ) - - yield generated_fields - - -@pytest.fixture -def generated_resource_name(generated_resource_fields: utils.Fields): - name = ( - f"projects/{generated_resource_fields.project}/" - f"locations/{generated_resource_fields.location}" - f"/{generated_resource_fields.resource}/{generated_resource_fields.id}" - ) - - yield name - - -def test_extract_fields_from_resource_name_with_extracted_fields( - generated_resource_name: str, generated_resource_fields: utils.Fields -): - """Verify fields extracted from resource name match the original fields""" - - assert ( - utils.extract_fields_from_resource_name(resource_name=generated_resource_name) - == generated_resource_fields - ) - - -@pytest.mark.parametrize( - "resource_name, resource_noun, expected", - [ - # Expects pattern "projects/.../locations/.../datasets/..." - ("projects/123456/locations/us-central1/datasets/987654", "datasets", True), - # Expects pattern "projects/.../locations/.../batchPredictionJobs/..." - ( - "projects/857392/locations/us-central1/trainingPipelines/347292", - "batchPredictionJobs", - False, - ), - # Expects pattern "projects/.../locations/.../metadataStores/.../contexts/..." - ( - "projects/857392/locations/us-central1/metadataStores/default/contexts/123", - "metadataStores/default/contexts", - True, - ), - # Expects pattern "projects/.../locations/.../tensorboards/.../experiments/.../runs/.../timeSeries/..." 
- ( - "projects/857392/locations/us-central1/tensorboards/123/experiments/456/runs/789/timeSeries/1", - "tensorboards/123/experiments/456/runs/789/timeSeries", - True, - ), - ], -) -def test_extract_fields_from_resource_name_with_resource_noun( - resource_name: str, resource_noun: str, expected: bool -): - assert ( - bool( - utils.extract_fields_from_resource_name( - resource_name=resource_name, resource_noun=resource_noun - ) - ) - == expected - ) - - def test_invalid_region_raises_with_invalid_region(): with pytest.raises(ValueError): aiplatform.utils.validate_region(region="us-west4") @@ -141,42 +47,67 @@ def test_invalid_region_does_not_raise_with_valid_region(): @pytest.mark.parametrize( - "resource_noun, project, location, full_name", + "resource_noun, project, parse_resource_name_method, format_resource_name_method, parent_resource_name_fields, location, full_name", [ ( "datasets", "123456", + aiplatform.TabularDataset._parse_resource_name, + aiplatform.TabularDataset._format_resource_name, + None, "us-central1", "projects/123456/locations/us-central1/datasets/987654", ), ( "trainingPipelines", "857392", + aiplatform.CustomTrainingJob._parse_resource_name, + aiplatform.CustomTrainingJob._format_resource_name, + None, "us-west20", "projects/857392/locations/us-central1/trainingPipelines/347292", ), ( - "metadataStores/default/contexts", + "contexts", "123456", + aiplatform.metadata._Context._parse_resource_name, + aiplatform.metadata._Context._format_resource_name, + {aiplatform.metadata._MetadataStore._resource_noun: "default"}, "europe-west4", "projects/857392/locations/us-central1/metadataStores/default/contexts/123", ), ( - "tensorboards/123/experiments/456/runs/789/timeSeries", + "timeSeries", "857392", + aiplatform.gapic.TensorboardServiceClient.parse_tensorboard_time_series_path, + aiplatform.gapic.TensorboardServiceClient.tensorboard_time_series_path, + { + aiplatform.Tensorboard._resource_noun: "123", + "experiments": "456", + "runs": "789", + }, 
"us-central1", "projects/857392/locations/us-central1/tensorboards/123/experiments/456/runs/789/timeSeries/1", ), ], ) def test_full_resource_name_with_full_name( - resource_noun: str, project: str, location: str, full_name: str, + resource_noun: str, + project: str, + parse_resource_name_method: Callable[[str], Dict[str, str]], + format_resource_name_method: Callable[..., str], + parent_resource_name_fields: Optional[Dict[str, str]], + location: str, + full_name: str, ): # should ignore issues with other arguments as resource_name is full_name assert ( aiplatform.utils.full_resource_name( resource_name=full_name, resource_noun=resource_noun, + parse_resource_name_method=parse_resource_name_method, + format_resource_name_method=format_resource_name_method, + parent_resource_name_fields=parent_resource_name_fields, project=project, location=location, ) @@ -185,11 +116,14 @@ def test_full_resource_name_with_full_name( @pytest.mark.parametrize( - "partial_name, resource_noun, project, location, full_name", + "partial_name, resource_noun, parse_resource_name_method, format_resource_name_method, parent_resource_name_fields, project, location, full_name", [ ( "987654", "datasets", + aiplatform.TabularDataset._parse_resource_name, + aiplatform.TabularDataset._format_resource_name, + None, "123456", "us-central1", "projects/123456/locations/us-central1/datasets/987654", @@ -197,20 +131,33 @@ def test_full_resource_name_with_full_name( ( "347292", "trainingPipelines", + aiplatform.CustomTrainingJob._parse_resource_name, + aiplatform.CustomTrainingJob._format_resource_name, + None, "857392", "us-central1", "projects/857392/locations/us-central1/trainingPipelines/347292", ), ( "123", - "metadataStores/default/contexts", + "contexts", + aiplatform.metadata._Context._parse_resource_name, + aiplatform.metadata._Context._format_resource_name, + {aiplatform.metadata._MetadataStore._resource_noun: "default"}, "857392", "us-central1", 
"projects/857392/locations/us-central1/metadataStores/default/contexts/123", ), ( "1", - "tensorboards/123/experiments/456/runs/789/timeSeries", + "timeSeries", + aiplatform.gapic.TensorboardServiceClient.parse_tensorboard_time_series_path, + aiplatform.gapic.TensorboardServiceClient.tensorboard_time_series_path, + { + aiplatform.Tensorboard._resource_noun: "123", + "experiments": "456", + "runs": "789", + }, "857392", "us-central1", "projects/857392/locations/us-central1/tensorboards/123/experiments/456/runs/789/timeSeries/1", @@ -218,12 +165,22 @@ def test_full_resource_name_with_full_name( ], ) def test_full_resource_name_with_partial_name( - partial_name: str, resource_noun: str, project: str, location: str, full_name: str, + partial_name: str, + resource_noun: str, + parse_resource_name_method: Callable[[str], Dict[str, str]], + format_resource_name_method: Callable[..., str], + parent_resource_name_fields: Optional[Dict[str, str]], + project: str, + location: str, + full_name: str, ): assert ( aiplatform.utils.full_resource_name( resource_name=partial_name, resource_noun=resource_noun, + parse_resource_name_method=parse_resource_name_method, + format_resource_name_method=format_resource_name_method, + parent_resource_name_fields=parent_resource_name_fields, project=project, location=location, ) @@ -242,6 +199,8 @@ def test_full_resource_name_raises_value_error( aiplatform.utils.full_resource_name( resource_name=partial_name, resource_noun=resource_noun, + parse_resource_name_method=aiplatform.CustomTrainingJob._parse_resource_name, + format_resource_name_method=aiplatform.CustomTrainingJob._format_resource_name, project=project, location=location, ) From 7fc2fbf747acd9f17c64d0e1c6360c4d19d7a1cb Mon Sep 17 00:00:00 2001 From: Sasha Sobran Date: Thu, 9 Dec 2021 10:59:36 -0500 Subject: [PATCH 03/11] refactor: Remove TensorboardExperiment for later implementation --- .../tensorboard/tensorboard_resource.py | 259 ------------------ 1 file changed, 259 deletions(-) 
diff --git a/google/cloud/aiplatform/tensorboard/tensorboard_resource.py b/google/cloud/aiplatform/tensorboard/tensorboard_resource.py index 3ee6901ccc..789d9e2dbc 100644 --- a/google/cloud/aiplatform/tensorboard/tensorboard_resource.py +++ b/google/cloud/aiplatform/tensorboard/tensorboard_resource.py @@ -285,262 +285,3 @@ def update( _LOGGER.log_action_completed_against_resource("tensorboard", "updated", self) return self - - -class TensorboardExperiment(_TensorboardServiceResource): - """Managed tensorboard experiment resource for Vertex AI.""" - - _resource_noun = "experiments" - _getter_method = "get_tensorboard_experiment" - _list_method = "list_tensorboard_experiment" - _delete_method = "delete_tensorboard_experiment" - _parse_resource_name_method = "parse_tensorboard_experiment_path" - _format_resource_name_method = "tensorboard_experiment_path" - - def __init__( - self, - tensorboard_experiment_name: str, - tensorboard_name: Optional[str] = None, - project: Optional[str] = None, - location: Optional[str] = None, - credentials: Optional[auth_credentials.Credentials] = None, - ): - """Retrieves an existing managed tensorboard experiment given a tensorboard experiment name or ID. - - Args: - tensorboard_experiment_name (str): - Required: A fully-qualified tensorboard resource experiment name or tensorboard experiment ID. - Example: "projects/123/locations/us-central1/tensorboards/456/experiments/123" or - "123" when tensorboard_name is passed and project and location are initialized or passed. - tensorboard_name (str): - Optional. Tensorboard resource id. - project (str): - Optional. Project to retrieve tensorboard from. If not set, project - set in aiplatform.init will be used. - location (str): - Optional. Location to retrieve tensorboard from. If not set, location - set in aiplatform.init will be used. - credentials (auth_credentials.Credentials): - Optional. Custom credentials to use to retrieve this Tensorboard. 
Overrides - credentials set in aiplatform.init. - """ - - super().__init__( - project=project, - location=location, - credentials=credentials, - resource_name=tensorboard_experiment_name, - ) - - self._gca_resource = self._get_gca_resource(resource_name=tensorboard_name) - - @classmethod - def create( - cls, - display_name: str, - description: Optional[str] = None, - labels: Optional[Dict[str, str]] = None, - project: Optional[str] = None, - location: Optional[str] = None, - credentials: Optional[auth_credentials.Credentials] = None, - request_metadata: Optional[Sequence[Tuple[str, str]]] = (), - encryption_spec_key_name: Optional[str] = None, - ) -> "Tensorboard": - """Creates a new tensorboard. - - Example Usage: - - tb = aiplatform.Tensorboard.create( - display_name='my display name', - description='my description', - labels={ - 'key1': 'value1', - 'key2': 'value2' - } - ) - - Args: - display_name (str): - Required. The user-defined name of the Tensorboard. - The name can be up to 128 characters long and can be consist - of any UTF-8 characters. - description (str): - Optional. Description of this Tensorboard. - labels (Dict[str, str]): - Optional. Labels with user-defined metadata to organize your Tensorboards. - Label keys and values can be no longer than 64 characters - (Unicode codepoints), can only contain lowercase letters, numeric - characters, underscores and dashes. International characters are allowed. - No more than 64 user labels can be associated with one Tensorboard - (System labels are excluded). - See https://goo.gl/xmQnxf for more information and examples of labels. - System reserved label keys are prefixed with "aiplatform.googleapis.com/" - and are immutable. - project (str): - Optional. Project to upload this model to. Overrides project set in - aiplatform.init. - location (str): - Optional. Location to upload this model to. Overrides location set in - aiplatform.init. - credentials (auth_credentials.Credentials): - Optional. 
Custom credentials to use to upload this model. Overrides - credentials set in aiplatform.init. - request_metadata (Sequence[Tuple[str, str]]): - Optional. Strings which should be sent along with the request as metadata. - encryption_spec_key_name (str): - Optional. Cloud KMS resource identifier of the customer - managed encryption key used to protect the tensorboard. Has the - form: - ``projects/my-project/locations/my-region/keyRings/my-kr/cryptoKeys/my-key``. - The key needs to be in the same region as where the compute - resource is created. - - If set, this Tensorboard and all sub-resources of this Tensorboard will be secured by this key. - - Overrides encryption_spec_key_name set in aiplatform.init. - - Returns: - tensorboard (Tensorboard): - Instantiated representation of the managed tensorboard resource. - """ - - utils.validate_display_name(display_name) - if labels: - utils.validate_labels(labels) - - api_client = cls._instantiate_client(location=location, credentials=credentials) - - parent = initializer.global_config.common_location_path( - project=project, location=location - ) - - encryption_spec = initializer.global_config.get_encryption_spec( - encryption_spec_key_name=encryption_spec_key_name - ) - - gapic_tensorboard = gca_tensorboard.Tensorboard( - display_name=display_name, - description=description, - labels=labels, - encryption_spec=encryption_spec, - ) - - create_tensorboard_lro = api_client.create_tensorboard( - parent=parent, tensorboard=gapic_tensorboard, metadata=request_metadata - ) - - _LOGGER.log_create_with_lro(cls, create_tensorboard_lro) - - created_tensorboard = create_tensorboard_lro.result() - - _LOGGER.log_create_complete(cls, created_tensorboard, "tb") - - return cls( - tensorboard_name=created_tensorboard.name, - project=project or initializer.global_config.project, - location=location or initializer.global_config.location, - credentials=credentials, - ) - - def update( - self, - display_name: Optional[str] = None, - 
description: Optional[str] = None, - labels: Optional[Dict[str, str]] = None, - request_metadata: Optional[Sequence[Tuple[str, str]]] = (), - encryption_spec_key_name: Optional[str] = None, - ) -> "Tensorboard": - """Updates an existing tensorboard. - - Example Usage: - - tb = aiplatform.Tensorboard(tensorboard_name='123456') - tb.update( - display_name='update my display name', - description='update my description', - ) - - Args: - display_name (str): - Optional. User-defined name of the Tensorboard. - The name can be up to 128 characters long and can be consist - of any UTF-8 characters. - description (str): - Optional. Description of this Tensorboard. - labels (Dict[str, str]): - Optional. Labels with user-defined metadata to organize your Tensorboards. - Label keys and values can be no longer than 64 characters - (Unicode codepoints), can only contain lowercase letters, numeric - characters, underscores and dashes. International characters are allowed. - No more than 64 user labels can be associated with one Tensorboard - (System labels are excluded). - See https://goo.gl/xmQnxf for more information and examples of labels. - System reserved label keys are prefixed with "aiplatform.googleapis.com/" - and are immutable. - request_metadata (Sequence[Tuple[str, str]]): - Optional. Strings which should be sent along with the request as metadata. - encryption_spec_key_name (str): - Optional. Cloud KMS resource identifier of the customer - managed encryption key used to protect the tensorboard. Has the - form: - ``projects/my-project/locations/my-region/keyRings/my-kr/cryptoKeys/my-key``. - The key needs to be in the same region as where the compute - resource is created. - - If set, this Tensorboard and all sub-resources of this Tensorboard will be secured by this key. - - Overrides encryption_spec_key_name set in aiplatform.init. - - Returns: - tensorboard (Tensorboard): - The managed tensorboard resource. 
- """ - update_mask = list() - - if display_name: - utils.validate_display_name(display_name) - update_mask.append("display_name") - - if description: - update_mask.append("description") - - if labels: - utils.validate_labels(labels) - update_mask.append("labels") - - encryption_spec = None - if encryption_spec_key_name: - encryption_spec = initializer.global_config.get_encryption_spec( - encryption_spec_key_name=encryption_spec_key_name, - ) - update_mask.append("encryption_spec") - - update_mask = field_mask_pb2.FieldMask(paths=update_mask) - - gapic_tensorboard = gca_tensorboard.Tensorboard( - name=self.resource_name, - display_name=display_name, - description=description, - labels=labels, - encryption_spec=encryption_spec, - ) - - _LOGGER.log_action_start_against_resource( - "Updating", "tensorboard", self, - ) - - update_tensorboard_lro = self.api_client.update_tensorboard( - tensorboard=gapic_tensorboard, - update_mask=update_mask, - metadata=request_metadata, - ) - - _LOGGER.log_action_started_against_resource_with_lro( - "Update", "tensorboard", self.__class__, update_tensorboard_lro - ) - - update_tensorboard_lro.result() - - _LOGGER.log_action_completed_against_resource("tensorboard", "updated", self) - - return self From 15cb93db6fd5327e548d4c0c01d842174fc02601 Mon Sep 17 00:00:00 2001 From: Sasha Sobran Date: Thu, 9 Dec 2021 11:09:06 -0500 Subject: [PATCH 04/11] fix: fix type hint --- google/cloud/aiplatform/utils/__init__.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/google/cloud/aiplatform/utils/__init__.py b/google/cloud/aiplatform/utils/__init__.py index 9d2e28caf9..9db8f10e3a 100644 --- a/google/cloud/aiplatform/utils/__init__.py +++ b/google/cloud/aiplatform/utils/__init__.py @@ -112,7 +112,7 @@ def full_resource_name( parent_resource_name_fields: Optional[Dict[str, str]] = None, project: Optional[str] = None, location: Optional[str] = None, - resource_id_validator: Optional[Callable[str, None]] = None, + 
resource_id_validator: Optional[Callable[[str], None]] = None, ) -> str: """Returns fully qualified resource name. From 8795a88f0277535f83aee5389ebee59696a454ec Mon Sep 17 00:00:00 2001 From: Sasha Sobran Date: Thu, 9 Dec 2021 11:36:53 -0500 Subject: [PATCH 05/11] fix: fix type hint --- google/cloud/aiplatform/base.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/google/cloud/aiplatform/base.py b/google/cloud/aiplatform/base.py index 51bf3ec353..f30946dc19 100644 --- a/google/cloud/aiplatform/base.py +++ b/google/cloud/aiplatform/base.py @@ -448,7 +448,7 @@ def _format_resource_name_method(self) -> str: # Override this value with staticmethod # to use custom resource id validators per resource - _resource_id_validator: Optional[Callable[str, None]] = None + _resource_id_validator: Optional[Callable[[str], None]] = None def __init__( self, From 8f8ebab8b892e3b387d7792c083b17764431d7c4 Mon Sep 17 00:00:00 2001 From: sasha-gitg <44654632+sasha-gitg@users.noreply.github.com> Date: Mon, 13 Dec 2021 13:35:17 -0500 Subject: [PATCH 06/11] Apply suggestions from code review Co-authored-by: Morgan Du --- google/cloud/aiplatform/base.py | 8 ++++---- google/cloud/aiplatform/featurestore/entity_type.py | 2 +- google/cloud/aiplatform/featurestore/feature.py | 2 +- google/cloud/aiplatform/utils/__init__.py | 4 ++-- 4 files changed, 8 insertions(+), 8 deletions(-) diff --git a/google/cloud/aiplatform/base.py b/google/cloud/aiplatform/base.py index 54637934b3..2d9df7013a 100644 --- a/google/cloud/aiplatform/base.py +++ b/google/cloud/aiplatform/base.py @@ -504,7 +504,7 @@ def _instantiate_client( @classmethod def _parse_resource_name(cls, resource_name: str) -> Dict[str, str]: """ - Parses resource name into it's component segments. + Parses resource name into its component segments. Args: resource_name: Resource name of this resource. 
@@ -519,7 +519,7 @@ def _parse_resource_name(cls, resource_name: str) -> Dict[str, str]: @classmethod def _format_resource_name(cls, **kwargs: str) -> str: """ - Formats a resource name it's component segments. + Formats a resource name using its component segments. Args: **kwargs: Resource name parts. Singular and snake case. ie: @@ -571,8 +571,8 @@ def _get_gca_resource( resource_name: str, parent_resource_name_fields: Optional[Dict[str, str]] = None, ) -> proto.Message: - """Returns GAPIC service representation of client class resource.""" - """ + """Returns GAPIC service representation of client class resource. + Args: resource_name (str): Required. A fully-qualified resource name or ID. diff --git a/google/cloud/aiplatform/featurestore/entity_type.py b/google/cloud/aiplatform/featurestore/entity_type.py index b46982dce0..a4aa295052 100644 --- a/google/cloud/aiplatform/featurestore/entity_type.py +++ b/google/cloud/aiplatform/featurestore/entity_type.py @@ -309,7 +309,7 @@ def list( format_resource_name_method=featurestore.Featurestore._format_resource_name, project=project, location=location, - resource_id_validator=cls._resource_id_validator, + resource_id_validator=featurestore.Featurestore._resource_id_validator, ), ) diff --git a/google/cloud/aiplatform/featurestore/feature.py b/google/cloud/aiplatform/featurestore/feature.py index 5dbbe67cfd..a71096f9f8 100644 --- a/google/cloud/aiplatform/featurestore/feature.py +++ b/google/cloud/aiplatform/featurestore/feature.py @@ -324,7 +324,7 @@ def list( else featurestore_id, project=project, location=location, - resource_id_validator=cls._resource_id_validator, + resource_id_validator=featurestore.EntityType._resource_id_validator, ), ) diff --git a/google/cloud/aiplatform/utils/__init__.py b/google/cloud/aiplatform/utils/__init__.py index 9db8f10e3a..afb47fdf4d 100644 --- a/google/cloud/aiplatform/utils/__init__.py +++ b/google/cloud/aiplatform/utils/__init__.py @@ -101,7 +101,7 @@ def validate_id(resource_id: 
str): """ if not RESOURCE_ID_PATTERN.match(resource_id): - raise ValueError("Resource {resource_id} is not a valid resource name or id.") + raise ValueError(f"Resource {resource_id} is not a valid resource id.") def full_resource_name( @@ -128,7 +128,7 @@ def full_resource_name( "projects/123/locations/us-central1/metadataStores/123/contexts/456", you would pass "metadataStores/123/contexts" as the resource_noun. parse_resource_name_method (Callable[[str], Dict[str,str]]): - Required. Method that parses a the resource name into is segment parts. + Required. Method that parses a resource name into its segment parts. These are generally included with GAPIC clients. format_resource_name_method (Callable[..., str]): Required. Method that takes segment parts of resource names and returns From e4b665255d53553a3e28f55bfc80146e9e9bab84 Mon Sep 17 00:00:00 2001 From: Sasha Sobran Date: Mon, 13 Dec 2021 14:15:04 -0500 Subject: [PATCH 07/11] Address review comments. --- google/cloud/aiplatform/base.py | 9 ++++++--- .../cloud/aiplatform/featurestore/entity_type.py | 2 +- google/cloud/aiplatform/featurestore/feature.py | 9 ++++++++- .../aiplatform/featurestore/featurestore.py | 2 +- google/cloud/aiplatform/initializer.py | 1 - google/cloud/aiplatform/utils/__init__.py | 7 ++++--- tests/unit/aiplatform/test_featurestores.py | 16 ++++++++++++++++ 7 files changed, 36 insertions(+), 10 deletions(-) diff --git a/google/cloud/aiplatform/base.py b/google/cloud/aiplatform/base.py index 2d9df7013a..a3e8c352b0 100644 --- a/google/cloud/aiplatform/base.py +++ b/google/cloud/aiplatform/base.py @@ -572,10 +572,13 @@ def _get_gca_resource( parent_resource_name_fields: Optional[Dict[str, str]] = None, ) -> proto.Message: """Returns GAPIC service representation of client class resource. - + Args: - resource_name (str): - Required. A fully-qualified resource name or ID. + resource_name (str): Required. A fully-qualified resource name or ID. 
+ parent_resource_name_fields (Dict[str,str]): + Optional. Mapping of parent resource name key to values. These + will be used to compose the resource name if only resource ID is given. + Should not include project and location. """ resource_name = utils.full_resource_name( resource_name=resource_name, diff --git a/google/cloud/aiplatform/featurestore/entity_type.py b/google/cloud/aiplatform/featurestore/entity_type.py index a4aa295052..f777d2ed41 100644 --- a/google/cloud/aiplatform/featurestore/entity_type.py +++ b/google/cloud/aiplatform/featurestore/entity_type.py @@ -44,7 +44,7 @@ class EntityType(base.VertexAiResourceNounWithFutureManager): _format_resource_name_method = "entity_type_path" @staticmethod - def _resource_id_validator(resource_id): + def _resource_id_validator(resource_id: str): """Validates resource ID. Args: diff --git a/google/cloud/aiplatform/featurestore/feature.py b/google/cloud/aiplatform/featurestore/feature.py index a71096f9f8..ada0816037 100644 --- a/google/cloud/aiplatform/featurestore/feature.py +++ b/google/cloud/aiplatform/featurestore/feature.py @@ -44,7 +44,7 @@ class Feature(base.VertexAiResourceNounWithFutureManager): _format_resource_name_method = "feature_path" @staticmethod - def _resource_id_validator(resource_id): + def _resource_id_validator(resource_id: str): """Validates resource ID. Args: @@ -95,8 +95,15 @@ def __init__( credentials (auth_credentials.Credentials): Optional. Custom credentials to use to retrieve this Feature. Overrides credentials set in aiplatform.init. + Raises: + ValueError: If only one of featurestore_id or entity_type_id is provided. """ + if bool(featurestore_id) != bool(entity_type_id): + raise ValueError( + "featurestore_id and entity_type_id must both be provided or ommitted." 
+ ) + super().__init__( project=project, location=location, diff --git a/google/cloud/aiplatform/featurestore/featurestore.py b/google/cloud/aiplatform/featurestore/featurestore.py index 37ee5f94be..9194b1f3a9 100644 --- a/google/cloud/aiplatform/featurestore/featurestore.py +++ b/google/cloud/aiplatform/featurestore/featurestore.py @@ -43,7 +43,7 @@ class Featurestore(base.VertexAiResourceNounWithFutureManager): _format_resource_name_method = "featurestore_path" @staticmethod - def _resource_id_validator(resource_id): + def _resource_id_validator(resource_id: str): """Validates resource ID. Args: diff --git a/google/cloud/aiplatform/initializer.py b/google/cloud/aiplatform/initializer.py index 0043ce1a2e..00f6b19b40 100644 --- a/google/cloud/aiplatform/initializer.py +++ b/google/cloud/aiplatform/initializer.py @@ -60,7 +60,6 @@ def init( staging_bucket: Optional[str] = None, credentials: Optional[auth_credentials.Credentials] = None, encryption_spec_key_name: Optional[str] = None, - tensorboard_resource: Optional[str] = None, ): """Updates common initialization parameters with provided options. diff --git a/google/cloud/aiplatform/utils/__init__.py b/google/cloud/aiplatform/utils/__init__.py index afb47fdf4d..195cae2732 100644 --- a/google/cloud/aiplatform/utils/__init__.py +++ b/google/cloud/aiplatform/utils/__init__.py @@ -124,9 +124,6 @@ def full_resource_name( Required. A resource noun to validate the resource name against. For example, you would pass "datasets" to validate "projects/123/locations/us-central1/datasets/456". - In the case of deeper naming structures, e.g., - "projects/123/locations/us-central1/metadataStores/123/contexts/456", - you would pass "metadataStores/123/contexts" as the resource_noun. parse_resource_name_method (Callable[[str], Dict[str,str]]): Required. Method that parses a resource name into its segment parts. These are generally included with GAPIC clients. 
@@ -136,6 +133,10 @@ def full_resource_name( parent_resource_name_fields (Dict[str, str]): Optional. Dictionary of segment parts where key is the resource noun and values are the resource ids. + For example: + { + "metadataStores": "123" + } project (str): Optional. project to retrieve resource_noun from. If not set, project set in aiplatform.init will be used. diff --git a/tests/unit/aiplatform/test_featurestores.py b/tests/unit/aiplatform/test_featurestores.py index 8be6a52277..5a93c8efd5 100644 --- a/tests/unit/aiplatform/test_featurestores.py +++ b/tests/unit/aiplatform/test_featurestores.py @@ -572,6 +572,22 @@ def test_init_feature( name=_TEST_FEATURE_NAME, retry=base._DEFAULT_RETRY ) + def test_init_feature_raises_with_only_featurestore_id(self): + aiplatform.init(project=_TEST_PROJECT) + + with pytest.raises(ValueError): + aiplatform.Feature( + feature_name=_TEST_FEATURE_NAME, featurestore_id=_TEST_FEATURESTORE_ID, + ) + + def test_init_feature_raises_with_only_entity_type_id(self): + aiplatform.init(project=_TEST_PROJECT) + + with pytest.raises(ValueError): + aiplatform.Feature( + feature_name=_TEST_FEATURE_NAME, entity_type_id=_TEST_ENTITY_TYPE_ID, + ) + @pytest.mark.usefixtures("get_feature_mock") def test_get_featurestore(self, get_featurestore_mock): aiplatform.init(project=_TEST_PROJECT) From 73b17d57127345b3113407e7bcf88671f9846763 Mon Sep 17 00:00:00 2001 From: Sasha Sobran Date: Tue, 14 Dec 2021 09:50:26 -0500 Subject: [PATCH 08/11] checkpoint --- .../tensorboard/tensorboard_resource.py | 278 ++++++++++++++++++ 1 file changed, 278 insertions(+) diff --git a/google/cloud/aiplatform/tensorboard/tensorboard_resource.py b/google/cloud/aiplatform/tensorboard/tensorboard_resource.py index 789d9e2dbc..19b452166f 100644 --- a/google/cloud/aiplatform/tensorboard/tensorboard_resource.py +++ b/google/cloud/aiplatform/tensorboard/tensorboard_resource.py @@ -285,3 +285,281 @@ def update( _LOGGER.log_action_completed_against_resource("tensorboard", "updated", 
self) return self + + +class TensorboardExperiment(_TensorboardServiceResource): + """Managed tensorboard resource for Vertex AI.""" + + _resource_noun = "experiments" + _getter_method = "get_tensorboard_experiment" + _list_method = "list_tensorboard_experiments" + _delete_method = "delete_tensorboard_experiment" + _parse_resource_name_method = "parse_tensorboard_experiment_path" + _format_resource_name_method = "tensorboard_experiment_path" + + def __init__( + self, + tensorboard_experiment_name: str, + tensorboard_id: Optional[str] = None, + project: Optional[str] = None, + location: Optional[str] = None, + credentials: Optional[auth_credentials.Credentials] = None, + ): + """Retrieves an existing tensorboard experiment given a tensorboard experiment name or ID. + + Example Usage: + + tb_exp = aiplatform.TensorboardExperiment( + tensorboard_experiment_name= "projects/123/locations/us-central1/tensorboards/456/experiments/678" + ) + + tb_exp = aiplatform.TensorboardExperiment( + tensorboard_experiment_name= "678" + tensorboard_id = "456" + ) + + Args: + tensorboard_experiment_name (str): + Required. A fully-qualified tensorboard experiment resource name or resource ID. + Example: "projects/123/locations/us-central1/tensorboards/456/experiments/678" or + "678" when tensorboard_id is passed and project and location are initialized or passed. + tensorboard_id (str): + Optional. A tensorboard resource ID. + project (str): + Optional. Project to retrieve tensorboard from. If not set, project + set in aiplatform.init will be used. + location (str): + Optional. Location to retrieve tensorboard from. If not set, location + set in aiplatform.init will be used. + credentials (auth_credentials.Credentials): + Optional. Custom credentials to use to retrieve this Tensorboard. Overrides + credentials set in aiplatform.init. 
+ """ + + super().__init__( + project=project, + location=location, + credentials=credentials, + resource_name=tensorboard_experiment_name, + ) + self._gca_resource = self._get_gca_resource( + resource_name=tensorboard_experiment_name, + parent_resource_name_fields={ + Tensorboard._resource_noun: tensorboard_id + } if tensorboard_id else tensorboard_id) + + @classmethod + def create( + cls, + tensorboard_id: str, + tensorboard_experiment_id: Optional[str] = None, + display_name: Optional[str] = None, + description: Optional[str] = None, + labels: Optional[Dict[str, str]] = None, + project: Optional[str] = None, + location: Optional[str] = None, + credentials: Optional[auth_credentials.Credentials] = None, + request_metadata: Optional[Sequence[Tuple[str, str]]] = (), + encryption_spec_key_name: Optional[str] = None, + ) -> "Tensorboard": + """Creates a new tensorboard. + + Example Usage: + + tb = aiplatform.TensorboardExperiment.create( + display_name='my display name', + tensorboard_id='456' + tensorboard_experiment_id='my-experiment' + description='my description', + labels={ + 'key1': 'value1', + 'key2': 'value2' + } + ) + + Args: + display_name (str): + Required. The user-defined name of the Tensorboard Experiment. + The name can be up to 128 characters long and can be consist + of any UTF-8 characters. + tensorboard_id + description (str): + Optional. Description of this Tensorboard. + labels (Dict[str, str]): + Optional. Labels with user-defined metadata to organize your Tensorboards. + Label keys and values can be no longer than 64 characters + (Unicode codepoints), can only contain lowercase letters, numeric + characters, underscores and dashes. International characters are allowed. + No more than 64 user labels can be associated with one Tensorboard + (System labels are excluded). + See https://goo.gl/xmQnxf for more information and examples of labels. + System reserved label keys are prefixed with "aiplatform.googleapis.com/" + and are immutable. 
+ project (str): + Optional. Project to upload this model to. Overrides project set in + aiplatform.init. + location (str): + Optional. Location to upload this model to. Overrides location set in + aiplatform.init. + credentials (auth_credentials.Credentials): + Optional. Custom credentials to use to upload this model. Overrides + credentials set in aiplatform.init. + request_metadata (Sequence[Tuple[str, str]]): + Optional. Strings which should be sent along with the request as metadata. + encryption_spec_key_name (str): + Optional. Cloud KMS resource identifier of the customer + managed encryption key used to protect the tensorboard. Has the + form: + ``projects/my-project/locations/my-region/keyRings/my-kr/cryptoKeys/my-key``. + The key needs to be in the same region as where the compute + resource is created. + + If set, this Tensorboard and all sub-resources of this Tensorboard will be secured by this key. + + Overrides encryption_spec_key_name set in aiplatform.init. + + Returns: + tensorboard (Tensorboard): + Instantiated representation of the managed tensorboard resource. 
+ """ + + utils.validate_display_name(display_name) + if labels: + utils.validate_labels(labels) + + api_client = cls._instantiate_client(location=location, credentials=credentials) + + parent = initializer.global_config.common_location_path( + project=project, location=location + ) + + encryption_spec = initializer.global_config.get_encryption_spec( + encryption_spec_key_name=encryption_spec_key_name + ) + + gapic_tensorboard = gca_tensorboard.Tensorboard( + display_name=display_name, + description=description, + labels=labels, + encryption_spec=encryption_spec, + ) + + create_tensorboard_lro = api_client.create_tensorboard( + parent=parent, tensorboard=gapic_tensorboard, metadata=request_metadata + ) + + _LOGGER.log_create_with_lro(cls, create_tensorboard_lro) + + created_tensorboard = create_tensorboard_lro.result() + + _LOGGER.log_create_complete(cls, created_tensorboard, "tb") + + return cls( + tensorboard_name=created_tensorboard.name, + project=project or initializer.global_config.project, + location=location or initializer.global_config.location, + credentials=credentials, + ) + + def update( + self, + display_name: Optional[str] = None, + description: Optional[str] = None, + labels: Optional[Dict[str, str]] = None, + request_metadata: Optional[Sequence[Tuple[str, str]]] = (), + encryption_spec_key_name: Optional[str] = None, + ) -> "Tensorboard": + """Updates an existing tensorboard. + + Example Usage: + + tb = aiplatform.Tensorboard(tensorboard_name='123456') + tb.update( + display_name='update my display name', + description='update my description', + ) + + Args: + display_name (str): + Optional. User-defined name of the Tensorboard. + The name can be up to 128 characters long and can be consist + of any UTF-8 characters. + description (str): + Optional. Description of this Tensorboard. + labels (Dict[str, str]): + Optional. Labels with user-defined metadata to organize your Tensorboards. 
+ Label keys and values can be no longer than 64 characters + (Unicode codepoints), can only contain lowercase letters, numeric + characters, underscores and dashes. International characters are allowed. + No more than 64 user labels can be associated with one Tensorboard + (System labels are excluded). + See https://goo.gl/xmQnxf for more information and examples of labels. + System reserved label keys are prefixed with "aiplatform.googleapis.com/" + and are immutable. + request_metadata (Sequence[Tuple[str, str]]): + Optional. Strings which should be sent along with the request as metadata. + encryption_spec_key_name (str): + Optional. Cloud KMS resource identifier of the customer + managed encryption key used to protect the tensorboard. Has the + form: + ``projects/my-project/locations/my-region/keyRings/my-kr/cryptoKeys/my-key``. + The key needs to be in the same region as where the compute + resource is created. + + If set, this Tensorboard and all sub-resources of this Tensorboard will be secured by this key. + + Overrides encryption_spec_key_name set in aiplatform.init. + + Returns: + tensorboard (Tensorboard): + The managed tensorboard resource. 
+ """ + update_mask = list() + + if display_name: + utils.validate_display_name(display_name) + update_mask.append("display_name") + + if description: + update_mask.append("description") + + if labels: + utils.validate_labels(labels) + update_mask.append("labels") + + encryption_spec = None + if encryption_spec_key_name: + encryption_spec = initializer.global_config.get_encryption_spec( + encryption_spec_key_name=encryption_spec_key_name, + ) + update_mask.append("encryption_spec") + + update_mask = field_mask_pb2.FieldMask(paths=update_mask) + + gapic_tensorboard = gca_tensorboard.Tensorboard( + name=self.resource_name, + display_name=display_name, + description=description, + labels=labels, + encryption_spec=encryption_spec, + ) + + _LOGGER.log_action_start_against_resource( + "Updating", "tensorboard", self, + ) + + update_tensorboard_lro = self.api_client.update_tensorboard( + tensorboard=gapic_tensorboard, + update_mask=update_mask, + metadata=request_metadata, + ) + + _LOGGER.log_action_started_against_resource_with_lro( + "Update", "tensorboard", self.__class__, update_tensorboard_lro + ) + + update_tensorboard_lro.result() + + _LOGGER.log_action_completed_against_resource("tensorboard", "updated", self) + + return self From 2ac31859e1afc9dce449518bf2d8642e7425e545 Mon Sep 17 00:00:00 2001 From: Sasha Sobran Date: Wed, 15 Dec 2021 12:35:13 -0500 Subject: [PATCH 09/11] feat: Add support to create TensorboardExperiment --- google/cloud/aiplatform/__init__.py | 5 +- .../cloud/aiplatform/tensorboard/__init__.py | 7 +- .../tensorboard/tensorboard_resource.py | 254 ++++++++---------- tests/system/aiplatform/test_tensorboard.py | 22 ++ tests/unit/aiplatform/test_tensorboard.py | 158 ++++++++++- 5 files changed, 292 insertions(+), 154 deletions(-) diff --git a/google/cloud/aiplatform/__init__.py b/google/cloud/aiplatform/__init__.py index 3e206a5538..626baa06f5 100644 --- a/google/cloud/aiplatform/__init__.py +++ b/google/cloud/aiplatform/__init__.py @@ -47,7 +47,7 
@@ HyperparameterTuningJob, ) from google.cloud.aiplatform.pipeline_jobs import PipelineJob -from google.cloud.aiplatform.tensorboard import Tensorboard +from google.cloud.aiplatform.tensorboard import Tensorboard, TensorboardExperiment from google.cloud.aiplatform.training_jobs import ( CustomTrainingJob, CustomContainerTrainingJob, @@ -105,8 +105,9 @@ "Model", "PipelineJob", "TabularDataset", + "Tensorboard", + "TensorboardExperiment", "TextDataset", "TimeSeriesDataset", "VideoDataset", - "Tensorboard", ) diff --git a/google/cloud/aiplatform/tensorboard/__init__.py b/google/cloud/aiplatform/tensorboard/__init__.py index f4b1c0b105..e12b441143 100644 --- a/google/cloud/aiplatform/tensorboard/__init__.py +++ b/google/cloud/aiplatform/tensorboard/__init__.py @@ -15,7 +15,10 @@ # limitations under the License. # -from google.cloud.aiplatform.tensorboard.tensorboard_resource import Tensorboard +from google.cloud.aiplatform.tensorboard.tensorboard_resource import ( + Tensorboard, + TensorboardExperiment, +) -__all__ = ("Tensorboard",) +__all__ = ("Tensorboard", "TensorboardExperiment") diff --git a/google/cloud/aiplatform/tensorboard/tensorboard_resource.py b/google/cloud/aiplatform/tensorboard/tensorboard_resource.py index 19b452166f..8890afa04e 100644 --- a/google/cloud/aiplatform/tensorboard/tensorboard_resource.py +++ b/google/cloud/aiplatform/tensorboard/tensorboard_resource.py @@ -15,13 +15,16 @@ # limitations under the License. 
# -from typing import Optional, Sequence, Dict, Tuple +from typing import Dict, List, Optional, Sequence, Tuple from google.auth import credentials as auth_credentials from google.protobuf import field_mask_pb2 from google.cloud.aiplatform import base from google.cloud.aiplatform.compat.types import tensorboard as gca_tensorboard +from google.cloud.aiplatform.compat.types import ( + tensorboard_experiment as gca_tensorboard_experiment, +) from google.cloud.aiplatform import initializer from google.cloud.aiplatform import utils @@ -176,12 +179,7 @@ def create( _LOGGER.log_create_complete(cls, created_tensorboard, "tb") - return cls( - tensorboard_name=created_tensorboard.name, - project=project or initializer.global_config.project, - location=location or initializer.global_config.location, - credentials=credentials, - ) + return cls(tensorboard_name=created_tensorboard.name, credentials=credentials,) def update( self, @@ -233,8 +231,7 @@ def update( Overrides encryption_spec_key_name set in aiplatform.init. Returns: - tensorboard (Tensorboard): - The managed tensorboard resource. + Tensorboard: The managed tensorboard resource. 
""" update_mask = list() @@ -310,12 +307,12 @@ def __init__( Example Usage: tb_exp = aiplatform.TensorboardExperiment( - tensorboard_experiment_name= "projects/123/locations/us-central1/tensorboards/456/experiments/678" + tensorboard_experiment_name= "projects/123/locations/us-central1/tensorboards/456/experiments/678" ) tb_exp = aiplatform.TensorboardExperiment( tensorboard_experiment_name= "678" - tensorboard_id = "456" + tensorboard_id = "456" ) Args: @@ -344,32 +341,32 @@ def __init__( ) self._gca_resource = self._get_gca_resource( resource_name=tensorboard_experiment_name, - parent_resource_name_fields={ - Tensorboard._resource_noun: tensorboard_id - } if tensorboard_id else tensorboard_id) + parent_resource_name_fields={Tensorboard._resource_noun: tensorboard_id} + if tensorboard_id + else tensorboard_id, + ) @classmethod def create( cls, - tensorboard_id: str, - tensorboard_experiment_id: Optional[str] = None, + tensorboard_experiment_id: str, + tensorboard_name: str, display_name: Optional[str] = None, description: Optional[str] = None, labels: Optional[Dict[str, str]] = None, project: Optional[str] = None, location: Optional[str] = None, credentials: Optional[auth_credentials.Credentials] = None, - request_metadata: Optional[Sequence[Tuple[str, str]]] = (), - encryption_spec_key_name: Optional[str] = None, - ) -> "Tensorboard": + request_metadata: Sequence[Tuple[str, str]] = (), + ) -> "TensorboardExperiment": """Creates a new tensorboard. Example Usage: tb = aiplatform.TensorboardExperiment.create( - display_name='my display name', - tensorboard_id='456' tensorboard_experiment_id='my-experiment' + tensorboard_id='456' + display_name='my display name', description='my description', labels={ 'key1': 'value1', @@ -378,13 +375,27 @@ def create( ) Args: + tensorboard_experiment_id (str): + Required. The ID to use for the Tensorboard experiment, + which will become the final component of the Tensorboard + experiment's resource name. 
+ + This value should be 1-128 characters, and valid + characters are /[a-z][0-9]-/. + + This corresponds to the ``tensorboard_experiment_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + tensorboard_name (str): + Required. The resource name or ID of the Tensorboard to create + the TensorboardExperiment in. Format of resource name: + ``projects/{project}/locations/{location}/tensorboards/{tensorboard}`` display_name (str): - Required. The user-defined name of the Tensorboard Experiment. + Optional. The user-defined name of the Tensorboard Experiment. The name can be up to 128 characters long and can be consist of any UTF-8 characters. - tensorboard_id description (str): - Optional. Description of this Tensorboard. + Optional. Description of this Tensorboard Experiment. labels (Dict[str, str]): Optional. Labels with user-defined metadata to organize your Tensorboards. Label keys and values can be no longer than 64 characters @@ -406,160 +417,105 @@ def create( credentials set in aiplatform.init. request_metadata (Sequence[Tuple[str, str]]): Optional. Strings which should be sent along with the request as metadata. - encryption_spec_key_name (str): - Optional. Cloud KMS resource identifier of the customer - managed encryption key used to protect the tensorboard. Has the - form: - ``projects/my-project/locations/my-region/keyRings/my-kr/cryptoKeys/my-key``. - The key needs to be in the same region as where the compute - resource is created. - - If set, this Tensorboard and all sub-resources of this Tensorboard will be secured by this key. - - Overrides encryption_spec_key_name set in aiplatform.init. - Returns: - tensorboard (Tensorboard): - Instantiated representation of the managed tensorboard resource. + TensorboardExperiment: The TensorboardExperiment resource. 
""" - utils.validate_display_name(display_name) + if display_name: + utils.validate_display_name(display_name) + if labels: utils.validate_labels(labels) api_client = cls._instantiate_client(location=location, credentials=credentials) - parent = initializer.global_config.common_location_path( - project=project, location=location + parent = utils.full_resource_name( + resource_name=tensorboard_name, + resource_noun=Tensorboard._resource_noun, + parse_resource_name_method=Tensorboard._parse_resource_name, + format_resource_name_method=Tensorboard._format_resource_name, + project=project, + location=location, ) - encryption_spec = initializer.global_config.get_encryption_spec( - encryption_spec_key_name=encryption_spec_key_name + gapic_tensorboard_experiment = gca_tensorboard_experiment.TensorboardExperiment( + display_name=display_name, description=description, labels=labels, ) - gapic_tensorboard = gca_tensorboard.Tensorboard( - display_name=display_name, - description=description, - labels=labels, - encryption_spec=encryption_spec, - ) + _LOGGER.log_create_with_lro(cls) - create_tensorboard_lro = api_client.create_tensorboard( - parent=parent, tensorboard=gapic_tensorboard, metadata=request_metadata + tensorboard_experiment = api_client.create_tensorboard_experiment( + parent=parent, + tensorboard_experiment=gapic_tensorboard_experiment, + tensorboard_experiment_id=tensorboard_experiment_id, + metadata=request_metadata, ) - _LOGGER.log_create_with_lro(cls, create_tensorboard_lro) - - created_tensorboard = create_tensorboard_lro.result() - - _LOGGER.log_create_complete(cls, created_tensorboard, "tb") + _LOGGER.log_create_complete(cls, tensorboard_experiment, "tb experiment") return cls( - tensorboard_name=created_tensorboard.name, - project=project or initializer.global_config.project, - location=location or initializer.global_config.location, + tensorboard_experiment_name=tensorboard_experiment.name, credentials=credentials, ) - def update( - self, - display_name: 
Optional[str] = None, - description: Optional[str] = None, - labels: Optional[Dict[str, str]] = None, - request_metadata: Optional[Sequence[Tuple[str, str]]] = (), - encryption_spec_key_name: Optional[str] = None, - ) -> "Tensorboard": - """Updates an existing tensorboard. + @classmethod + def list( + cls, + tensorboard_name: str, + filter: Optional[str] = None, + order_by: Optional[str] = None, + project: Optional[str] = None, + location: Optional[str] = None, + credentials: Optional[auth_credentials.Credentials] = None, + ) -> List["TensorboardExperiment"]: + """List all instances of this Vertex AI Resource. Example Usage: - tb = aiplatform.Tensorboard(tensorboard_name='123456') - tb.update( - display_name='update my display name', - description='update my description', + aiplatform.TensorboardExperiment.list( + tensorboard_name='projects/my-project/locations/us-central1/tensorboards/123' ) Args: - display_name (str): - Optional. User-defined name of the Tensorboard. - The name can be up to 128 characters long and can be consist - of any UTF-8 characters. - description (str): - Optional. Description of this Tensorboard. - labels (Dict[str, str]): - Optional. Labels with user-defined metadata to organize your Tensorboards. - Label keys and values can be no longer than 64 characters - (Unicode codepoints), can only contain lowercase letters, numeric - characters, underscores and dashes. International characters are allowed. - No more than 64 user labels can be associated with one Tensorboard - (System labels are excluded). - See https://goo.gl/xmQnxf for more information and examples of labels. - System reserved label keys are prefixed with "aiplatform.googleapis.com/" - and are immutable. - request_metadata (Sequence[Tuple[str, str]]): - Optional. Strings which should be sent along with the request as metadata. - encryption_spec_key_name (str): - Optional. Cloud KMS resource identifier of the customer - managed encryption key used to protect the tensorboard. 
Has the - form: - ``projects/my-project/locations/my-region/keyRings/my-kr/cryptoKeys/my-key``. - The key needs to be in the same region as where the compute - resource is created. - - If set, this Tensorboard and all sub-resources of this Tensorboard will be secured by this key. - - Overrides encryption_spec_key_name set in aiplatform.init. - + tensorboard_name(str): + Required. The resource name or resource ID of the + Tensorboard to list + TensorboardExperiments. Format, if resource name: + 'projects/{project}/locations/{location}/tensorboards/{tensorboard}' + filter (str): + Optional. An expression for filtering the results of the request. + For field names both snake_case and camelCase are supported. + order_by (str): + Optional. A comma-separated list of fields to order by, sorted in + ascending order. Use "desc" after a field name for descending. + Supported fields: `display_name`, `create_time`, `update_time` + project (str): + Optional. Project to retrieve list from. If not set, project + set in aiplatform.init will be used. + location (str): + Optional. Location to retrieve list from. If not set, location + set in aiplatform.init will be used. + credentials (auth_credentials.Credentials): + Optional. Custom credentials to use to retrieve list. Overrides + credentials set in aiplatform.init. Returns: - tensorboard (Tensorboard): - The managed tensorboard resource. 
+ List[TensorboardExperiment] - A list of TensorboardExperiments """ - update_mask = list() - if display_name: - utils.validate_display_name(display_name) - update_mask.append("display_name") - - if description: - update_mask.append("description") - - if labels: - utils.validate_labels(labels) - update_mask.append("labels") - - encryption_spec = None - if encryption_spec_key_name: - encryption_spec = initializer.global_config.get_encryption_spec( - encryption_spec_key_name=encryption_spec_key_name, - ) - update_mask.append("encryption_spec") - - update_mask = field_mask_pb2.FieldMask(paths=update_mask) - - gapic_tensorboard = gca_tensorboard.Tensorboard( - name=self.resource_name, - display_name=display_name, - description=description, - labels=labels, - encryption_spec=encryption_spec, - ) - - _LOGGER.log_action_start_against_resource( - "Updating", "tensorboard", self, - ) - - update_tensorboard_lro = self.api_client.update_tensorboard( - tensorboard=gapic_tensorboard, - update_mask=update_mask, - metadata=request_metadata, + parent = utils.full_resource_name( + resource_name=tensorboard_name, + resource_noun=Tensorboard._resource_noun, + parse_resource_name_method=Tensorboard._parse_resource_name, + format_resource_name_method=Tensorboard._format_resource_name, + project=project, + location=location, ) - _LOGGER.log_action_started_against_resource_with_lro( - "Update", "tensorboard", self.__class__, update_tensorboard_lro + return super()._list( + filter=filter, + order_by=order_by, + project=project, + location=location, + credentials=credentials, + parent=parent, ) - - update_tensorboard_lro.result() - - _LOGGER.log_action_completed_against_resource("tensorboard", "updated", self) - - return self diff --git a/tests/system/aiplatform/test_tensorboard.py b/tests/system/aiplatform/test_tensorboard.py index 9ec8179ca5..ae4b5f7bb1 100644 --- a/tests/system/aiplatform/test_tensorboard.py +++ b/tests/system/aiplatform/test_tensorboard.py @@ -42,3 +42,25 @@ def 
test_create_and_get_tensorboard(self, shared_state): list_tb = aiplatform.Tensorboard.list() assert len(list_tb) > 0 + + tb_experiment = aiplatform.TensorboardExperiment.create( + tensorboard_experiment_id="vertex-sdk-e2e-test-experiment", + tensorboard_name=tb.resource_name, + display_name=self._make_display_name("tensorboard_experiment"), + description="Vertex SDK Integration test.", + labels={"test": "labels"}, + ) + + shared_state["resources"].append(tb_experiment) + + get_tb_experiment = aiplatform.TensorboardExperiment( + tb_experiment.resource_name + ) + + assert tb_experiment.resource_name == get_tb_experiment.resource_name + + list_tb_experiment = aiplatform.TensorboardExperiment.list( + tensorboard_name=tb.resource_name + ) + + assert len(list_tb_experiment) > 0 diff --git a/tests/unit/aiplatform/test_tensorboard.py b/tests/unit/aiplatform/test_tensorboard.py index 38ea935950..221b8afafa 100644 --- a/tests/unit/aiplatform/test_tensorboard.py +++ b/tests/unit/aiplatform/test_tensorboard.py @@ -37,9 +37,10 @@ ) from google.cloud.aiplatform_v1.types import ( + encryption_spec as gca_encryption_spec, tensorboard as gca_tensorboard, + tensorboard_experiment as gca_tensorboard_experiment, tensorboard_service as gca_tensorboard_service, - encryption_spec as gca_encryption_spec, ) from google.protobuf import field_mask_pb2 @@ -66,6 +67,11 @@ ) _TEST_INVALID_NAME = f"prj/{_TEST_PROJECT}/locations/{_TEST_LOCATION}/{_TEST_ID}" +_TEST_TENSORBOARD_EXPERIMENT_ID = "test-experiment" +_TEST_TENSORBOARD_EXPERIMENT_NAME = ( + f"{_TEST_NAME}/experiments/{_TEST_TENSORBOARD_EXPERIMENT_ID}" +) + # request_metadata _TEST_REQUEST_METADATA = () @@ -132,6 +138,60 @@ def delete_tensorboard_mock(): yield delete_tensorboard_mock +@pytest.fixture +def get_tensorboard_experiment_mock(): + with patch.object( + tensorboard_service_client.TensorboardServiceClient, + "get_tensorboard_experiment", + ) as get_tensorboard_experiment__mock: + get_tensorboard_experiment__mock.return_value = 
gca_tensorboard_experiment.TensorboardExperiment( + name=_TEST_TENSORBOARD_EXPERIMENT_NAME, display_name=_TEST_DISPLAY_NAME, + ) + yield get_tensorboard_experiment__mock + + +@pytest.fixture +def create_tensorboard_experiment_mock(): + with patch.object( + tensorboard_service_client.TensorboardServiceClient, + "create_tensorboard_experiment", + ) as create_tensorboard_experiment_mock: + create_tensorboard_experiment_mock.return_value = gca_tensorboard_experiment.TensorboardExperiment( + name=_TEST_TENSORBOARD_EXPERIMENT_NAME, display_name=_TEST_DISPLAY_NAME, + ) + yield create_tensorboard_experiment_mock + + +@pytest.fixture +def delete_tensorboard_experiment_mock(): + with mock.patch.object( + tensorboard_service_client.TensorboardServiceClient, + "delete_tensorboard_experiment", + ) as delete_tensorboard_experiment_mock: + delete_tensorboard_lro_experiment_mock = mock.Mock(operation.Operation) + delete_tensorboard_lro_experiment_mock.result.return_value = gca_tensorboard_service.DeleteTensorboardExperimentRequest( + name=_TEST_TENSORBOARD_EXPERIMENT_NAME, + ) + delete_tensorboard_experiment_mock.return_value = ( + delete_tensorboard_lro_experiment_mock + ) + yield delete_tensorboard_experiment_mock + + +@pytest.fixture +def list_tensorboard_experiment_mock(): + with patch.object( + tensorboard_service_client.TensorboardServiceClient, + "list_tensorboard_experiments", + ) as list_tensorboard_experiment_mock: + list_tensorboard_experiment_mock.return_value = [ + gca_tensorboard_experiment.TensorboardExperiment( + name=_TEST_TENSORBOARD_EXPERIMENT_NAME, display_name=_TEST_DISPLAY_NAME, + ) + ] + yield list_tensorboard_experiment_mock + + class TestTensorboard: def setup_method(self): reload(initializer) @@ -300,3 +360,99 @@ def test_update_tensorboard_encryption_spec(self, update_tensorboard_mock): tensorboard=expected_tensorboard, metadata=_TEST_REQUEST_METADATA, ) + + +class TestTensorboardExperiment: + def setup_method(self): + reload(initializer) + 
reload(aiplatform) + + def teardown_method(self): + initializer.global_pool.shutdown(wait=True) + + def test_init_tensorboard_experiment(self, get_tensorboard_experiment_mock): + aiplatform.init(project=_TEST_PROJECT) + tensorboard.TensorboardExperiment( + tensorboard_experiment_name=_TEST_TENSORBOARD_EXPERIMENT_NAME + ) + get_tensorboard_experiment_mock.assert_called_once_with( + name=_TEST_TENSORBOARD_EXPERIMENT_NAME, retry=base._DEFAULT_RETRY + ) + + def test_init_tensorboard_experiment_with_tensorboard( + self, get_tensorboard_experiment_mock + ): + aiplatform.init(project=_TEST_PROJECT) + tensorboard.TensorboardExperiment( + tensorboard_experiment_name=_TEST_TENSORBOARD_EXPERIMENT_ID, + tensorboard_id=_TEST_ID, + ) + get_tensorboard_experiment_mock.assert_called_once_with( + name=_TEST_TENSORBOARD_EXPERIMENT_NAME, retry=base._DEFAULT_RETRY + ) + + def test_init_tensorboard_experiment_with_id_only_with_project_and_location( + self, get_tensorboard_experiment_mock + ): + aiplatform.init(project=_TEST_PROJECT) + tensorboard.TensorboardExperiment( + tensorboard_experiment_name=_TEST_TENSORBOARD_EXPERIMENT_ID, + tensorboard_id=_TEST_ID, + project=_TEST_PROJECT, + location=_TEST_LOCATION, + ) + get_tensorboard_experiment_mock.assert_called_once_with( + name=_TEST_TENSORBOARD_EXPERIMENT_NAME, retry=base._DEFAULT_RETRY + ) + + def test_create_tensorboard_experiment( + self, create_tensorboard_experiment_mock, get_tensorboard_experiment_mock + ): + + aiplatform.init(project=_TEST_PROJECT,) + + tensorboard.TensorboardExperiment.create( + tensorboard_experiment_id=_TEST_TENSORBOARD_EXPERIMENT_ID, + tensorboard_name=_TEST_NAME, + display_name=_TEST_DISPLAY_NAME, + ) + + expected_tensorboard_experiment = gca_tensorboard_experiment.TensorboardExperiment( + display_name=_TEST_DISPLAY_NAME, + ) + + create_tensorboard_experiment_mock.assert_called_once_with( + parent=_TEST_NAME, + tensorboard_experiment=expected_tensorboard_experiment, + 
tensorboard_experiment_id=_TEST_TENSORBOARD_EXPERIMENT_ID, + metadata=_TEST_REQUEST_METADATA, + ) + + get_tensorboard_experiment_mock.assert_called_once_with( + name=_TEST_TENSORBOARD_EXPERIMENT_NAME, retry=base._DEFAULT_RETRY + ) + + @pytest.mark.usefixtures("get_tensorboard_experiment_mock") + def test_delete_tensorboard_experiement(self, delete_tensorboard_experiment_mock): + aiplatform.init(project=_TEST_PROJECT) + + my_tensorboard_experiment = tensorboard.TensorboardExperiment( + tensorboard_experiment_name=_TEST_TENSORBOARD_EXPERIMENT_NAME + ) + + my_tensorboard_experiment.delete() + + delete_tensorboard_experiment_mock.assert_called_once_with( + name=my_tensorboard_experiment.resource_name + ) + + def test_list_tensorboard_experiments(self, list_tensorboard_experiment_mock): + aiplatform.init(project=_TEST_PROJECT) + + tensorboard.TensorboardExperiment.list( + tensorboard_name=_TEST_NAME + ) + + list_tensorboard_experiment_mock.assert_called_once_with( + request={"parent": _TEST_NAME, "filter": None} + ) From 6ca3329bcb0a9ed6ed44463a70179f7d7abb67ba Mon Sep 17 00:00:00 2001 From: Sasha Sobran Date: Wed, 15 Dec 2021 14:25:32 -0500 Subject: [PATCH 10/11] lint --- tests/unit/aiplatform/test_tensorboard.py | 4 +--- 1 file changed, 1 insertion(+), 3 deletions(-) diff --git a/tests/unit/aiplatform/test_tensorboard.py b/tests/unit/aiplatform/test_tensorboard.py index 221b8afafa..5faa541186 100644 --- a/tests/unit/aiplatform/test_tensorboard.py +++ b/tests/unit/aiplatform/test_tensorboard.py @@ -449,9 +449,7 @@ def test_delete_tensorboard_experiement(self, delete_tensorboard_experiment_mock def test_list_tensorboard_experiments(self, list_tensorboard_experiment_mock): aiplatform.init(project=_TEST_PROJECT) - tensorboard.TensorboardExperiment.list( - tensorboard_name=_TEST_NAME - ) + tensorboard.TensorboardExperiment.list(tensorboard_name=_TEST_NAME) list_tensorboard_experiment_mock.assert_called_once_with( request={"parent": _TEST_NAME, "filter": None} From 
4a7fc86537336606dd986916893dd4667f352570 Mon Sep 17 00:00:00 2001
From: Sasha Sobran
Date: Wed, 15 Dec 2021 14:27:04 -0500
Subject: [PATCH 11/11] docs: Update docstrings

---
 google/cloud/aiplatform/tensorboard/tensorboard_resource.py | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/google/cloud/aiplatform/tensorboard/tensorboard_resource.py b/google/cloud/aiplatform/tensorboard/tensorboard_resource.py
index 8890afa04e..96159db211 100644
--- a/google/cloud/aiplatform/tensorboard/tensorboard_resource.py
+++ b/google/cloud/aiplatform/tensorboard/tensorboard_resource.py
@@ -359,7 +359,7 @@ def create(
         credentials: Optional[auth_credentials.Credentials] = None,
         request_metadata: Sequence[Tuple[str, str]] = (),
     ) -> "TensorboardExperiment":
-        """Creates a new tensorboard.
+        """Creates a new TensorboardExperiment.
 
         Example Usage:
 
@@ -468,7 +468,7 @@ def list(
         location: Optional[str] = None,
         credentials: Optional[auth_credentials.Credentials] = None,
     ) -> List["TensorboardExperiment"]:
-        """List all instances of this Vertex AI Resource.
+        """List TensorboardExperiments in a Tensorboard resource.
 
         Example Usage: