diff --git a/.github/CODEOWNERS b/.github/CODEOWNERS index b1ed9d116a..bb192279f6 100644 --- a/.github/CODEOWNERS +++ b/.github/CODEOWNERS @@ -26,3 +26,6 @@ CHANGELOG.md @googleapis/cloud-aiplatform-model-builder-sdk @googleapis/cdpe-cloudai README.rst @googleapis/cloud-aiplatform-model-builder-sdk @googleapis/cdpe-cloudai setup.py @googleapis/cloud-aiplatform-model-builder-sdk @googleapis/cdpe-cloudai + +# Vertex AI product team-specific ownership +/google/cloud/aiplatform/constants/prediction.py @googleapis/vertex-prediction-team diff --git a/google/cloud/aiplatform/__init__.py b/google/cloud/aiplatform/__init__.py index a07296378e..76d9b6efe9 100644 --- a/google/cloud/aiplatform/__init__.py +++ b/google/cloud/aiplatform/__init__.py @@ -53,6 +53,7 @@ AutoMLTextTrainingJob, AutoMLVideoTrainingJob, ) +from google.cloud.aiplatform import helpers """ Usage: @@ -73,6 +74,7 @@ "explain", "gapic", "init", + "helpers", "hyperparameter_tuning", "log_params", "log_metrics", diff --git a/google/cloud/aiplatform/constants/__init__.py b/google/cloud/aiplatform/constants/__init__.py new file mode 100644 index 0000000000..95f437a335 --- /dev/null +++ b/google/cloud/aiplatform/constants/__init__.py @@ -0,0 +1,18 @@ +# Copyright 2021 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +from google.cloud.aiplatform.constants import base +from google.cloud.aiplatform.constants import prediction + +__all__ = ("base", "prediction") diff --git a/google/cloud/aiplatform/constants.py b/google/cloud/aiplatform/constants/base.py similarity index 100% rename from google/cloud/aiplatform/constants.py rename to google/cloud/aiplatform/constants/base.py diff --git a/google/cloud/aiplatform/constants/prediction.py b/google/cloud/aiplatform/constants/prediction.py new file mode 100644 index 0000000000..682978ea58 --- /dev/null +++ b/google/cloud/aiplatform/constants/prediction.py @@ -0,0 +1,138 @@ +# Copyright 2021 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +import re + +from collections import defaultdict + +# [region]-docker.pkg.dev/vertex-ai/prediction/[framework]-[accelerator].[version]:latest +CONTAINER_URI_PATTERN = re.compile( + r"(?P<region>[\w]+)\-docker\.pkg\.dev\/vertex\-ai\/prediction\/" + r"(?P<framework>[\w]+)\-(?P<accelerator>[\w]+)\.(?P<version>[\d-]+):latest" +) + +SKLEARN = "sklearn" +TF = "tf" +TF2 = "tf2" +XGBOOST = "xgboost" + +XGBOOST_CONTAINER_URIS = [ + "us-docker.pkg.dev/vertex-ai/prediction/xgboost-cpu.1-4:latest", + "europe-docker.pkg.dev/vertex-ai/prediction/xgboost-cpu.1-4:latest", + "asia-docker.pkg.dev/vertex-ai/prediction/xgboost-cpu.1-4:latest", + "us-docker.pkg.dev/vertex-ai/prediction/xgboost-cpu.1-3:latest", + "europe-docker.pkg.dev/vertex-ai/prediction/xgboost-cpu.1-3:latest", + "asia-docker.pkg.dev/vertex-ai/prediction/xgboost-cpu.1-3:latest", + "us-docker.pkg.dev/vertex-ai/prediction/xgboost-cpu.1-2:latest", + "europe-docker.pkg.dev/vertex-ai/prediction/xgboost-cpu.1-2:latest", + "asia-docker.pkg.dev/vertex-ai/prediction/xgboost-cpu.1-2:latest", + "us-docker.pkg.dev/vertex-ai/prediction/xgboost-cpu.1-1:latest", + "europe-docker.pkg.dev/vertex-ai/prediction/xgboost-cpu.1-1:latest", + "asia-docker.pkg.dev/vertex-ai/prediction/xgboost-cpu.1-1:latest", + "us-docker.pkg.dev/vertex-ai/prediction/xgboost-cpu.0-90:latest", + "europe-docker.pkg.dev/vertex-ai/prediction/xgboost-cpu.0-90:latest", + "asia-docker.pkg.dev/vertex-ai/prediction/xgboost-cpu.0-90:latest", + "us-docker.pkg.dev/vertex-ai/prediction/xgboost-cpu.0-82:latest", + "europe-docker.pkg.dev/vertex-ai/prediction/xgboost-cpu.0-82:latest", + "asia-docker.pkg.dev/vertex-ai/prediction/xgboost-cpu.0-82:latest", +] + +SKLEARN_CONTAINER_URIS = [ + "us-docker.pkg.dev/vertex-ai/prediction/sklearn-cpu.1-0:latest", + "europe-docker.pkg.dev/vertex-ai/prediction/sklearn-cpu.1-0:latest", + "asia-docker.pkg.dev/vertex-ai/prediction/sklearn-cpu.1-0:latest", + "us-docker.pkg.dev/vertex-ai/prediction/sklearn-cpu.0-24:latest", +
"europe-docker.pkg.dev/vertex-ai/prediction/sklearn-cpu.0-24:latest", + "asia-docker.pkg.dev/vertex-ai/prediction/sklearn-cpu.0-24:latest", + "us-docker.pkg.dev/vertex-ai/prediction/sklearn-cpu.0-23:latest", + "europe-docker.pkg.dev/vertex-ai/prediction/sklearn-cpu.0-23:latest", + "asia-docker.pkg.dev/vertex-ai/prediction/sklearn-cpu.0-23:latest", + "us-docker.pkg.dev/vertex-ai/prediction/sklearn-cpu.0-22:latest", + "europe-docker.pkg.dev/vertex-ai/prediction/sklearn-cpu.0-22:latest", + "asia-docker.pkg.dev/vertex-ai/prediction/sklearn-cpu.0-22:latest", + "us-docker.pkg.dev/vertex-ai/prediction/sklearn-cpu.0-20:latest", + "europe-docker.pkg.dev/vertex-ai/prediction/sklearn-cpu.0-20:latest", + "asia-docker.pkg.dev/vertex-ai/prediction/sklearn-cpu.0-20:latest", +] + +TF_CONTAINER_URIS = [ + "us-docker.pkg.dev/vertex-ai/prediction/tf2-cpu.2-7:latest", + "europe-docker.pkg.dev/vertex-ai/prediction/tf2-cpu.2-7:latest", + "asia-docker.pkg.dev/vertex-ai/prediction/tf2-cpu.2-7:latest", + "us-docker.pkg.dev/vertex-ai/prediction/tf2-gpu.2-7:latest", + "europe-docker.pkg.dev/vertex-ai/prediction/tf2-gpu.2-7:latest", + "asia-docker.pkg.dev/vertex-ai/prediction/tf2-gpu.2-7:latest", + "us-docker.pkg.dev/vertex-ai/prediction/tf2-cpu.2-6:latest", + "europe-docker.pkg.dev/vertex-ai/prediction/tf2-cpu.2-6:latest", + "asia-docker.pkg.dev/vertex-ai/prediction/tf2-cpu.2-6:latest", + "us-docker.pkg.dev/vertex-ai/prediction/tf2-gpu.2-6:latest", + "europe-docker.pkg.dev/vertex-ai/prediction/tf2-gpu.2-6:latest", + "asia-docker.pkg.dev/vertex-ai/prediction/tf2-gpu.2-6:latest", + "us-docker.pkg.dev/vertex-ai/prediction/tf2-cpu.2-5:latest", + "europe-docker.pkg.dev/vertex-ai/prediction/tf2-cpu.2-5:latest", + "asia-docker.pkg.dev/vertex-ai/prediction/tf2-cpu.2-5:latest", + "us-docker.pkg.dev/vertex-ai/prediction/tf2-gpu.2-5:latest", + "europe-docker.pkg.dev/vertex-ai/prediction/tf2-gpu.2-5:latest", + "asia-docker.pkg.dev/vertex-ai/prediction/tf2-gpu.2-5:latest", + 
"us-docker.pkg.dev/vertex-ai/prediction/tf2-cpu.2-4:latest", + "europe-docker.pkg.dev/vertex-ai/prediction/tf2-cpu.2-4:latest", + "asia-docker.pkg.dev/vertex-ai/prediction/tf2-cpu.2-4:latest", + "us-docker.pkg.dev/vertex-ai/prediction/tf2-gpu.2-4:latest", + "europe-docker.pkg.dev/vertex-ai/prediction/tf2-gpu.2-4:latest", + "asia-docker.pkg.dev/vertex-ai/prediction/tf2-gpu.2-4:latest", + "us-docker.pkg.dev/vertex-ai/prediction/tf2-cpu.2-3:latest", + "europe-docker.pkg.dev/vertex-ai/prediction/tf2-cpu.2-3:latest", + "asia-docker.pkg.dev/vertex-ai/prediction/tf2-cpu.2-3:latest", + "us-docker.pkg.dev/vertex-ai/prediction/tf2-gpu.2-3:latest", + "europe-docker.pkg.dev/vertex-ai/prediction/tf2-gpu.2-3:latest", + "asia-docker.pkg.dev/vertex-ai/prediction/tf2-gpu.2-3:latest", + "us-docker.pkg.dev/vertex-ai/prediction/tf2-cpu.2-2:latest", + "europe-docker.pkg.dev/vertex-ai/prediction/tf2-cpu.2-2:latest", + "asia-docker.pkg.dev/vertex-ai/prediction/tf2-cpu.2-2:latest", + "us-docker.pkg.dev/vertex-ai/prediction/tf2-gpu.2-2:latest", + "europe-docker.pkg.dev/vertex-ai/prediction/tf2-gpu.2-2:latest", + "asia-docker.pkg.dev/vertex-ai/prediction/tf2-gpu.2-2:latest", + "us-docker.pkg.dev/vertex-ai/prediction/tf2-cpu.2-1:latest", + "europe-docker.pkg.dev/vertex-ai/prediction/tf2-cpu.2-1:latest", + "asia-docker.pkg.dev/vertex-ai/prediction/tf2-cpu.2-1:latest", + "us-docker.pkg.dev/vertex-ai/prediction/tf-cpu.1-15:latest", + "europe-docker.pkg.dev/vertex-ai/prediction/tf-cpu.1-15:latest", + "asia-docker.pkg.dev/vertex-ai/prediction/tf-cpu.1-15:latest", + "us-docker.pkg.dev/vertex-ai/prediction/tf-gpu.1-15:latest", + "europe-docker.pkg.dev/vertex-ai/prediction/tf-gpu.1-15:latest", + "asia-docker.pkg.dev/vertex-ai/prediction/tf-gpu.1-15:latest", +] + +SERVING_CONTAINER_URIS = ( + SKLEARN_CONTAINER_URIS + TF_CONTAINER_URIS + XGBOOST_CONTAINER_URIS +) + +# Map of all first-party prediction containers +d = defaultdict(lambda: defaultdict(lambda: defaultdict(lambda: defaultdict(str)))) + 
+for container_uri in SERVING_CONTAINER_URIS: + m = CONTAINER_URI_PATTERN.match(container_uri) + region, framework, accelerator, version = m[1], m[2], m[3], m[4] + version = version.replace("-", ".") + + if framework in (TF2, TF): # Store both `tf`, `tf2` as `tensorflow` + framework = "tensorflow" + + d[region][framework][accelerator][version] = container_uri + +_SERVING_CONTAINER_URI_MAP = d + +_SERVING_CONTAINER_DOCUMENTATION_URL = ( + "https://cloud.google.com/vertex-ai/docs/predictions/pre-built-containers" +) diff --git a/google/cloud/aiplatform/gapic/schema/__init__.py b/google/cloud/aiplatform/gapic/schema/__init__.py index e726749c77..5d31a70f1f 100644 --- a/google/cloud/aiplatform/gapic/schema/__init__.py +++ b/google/cloud/aiplatform/gapic/schema/__init__.py @@ -14,7 +14,7 @@ # See the License for the specific language governing permissions and # limitations under the License. -from google.cloud.aiplatform.helpers import _decorators +from google.cloud.aiplatform.utils.enhanced_library import _decorators from google.cloud.aiplatform.v1.schema import predict from google.cloud.aiplatform.v1.schema import trainingjob from google.cloud.aiplatform.v1beta1.schema import predict as predict_v1beta1 diff --git a/google/cloud/aiplatform/helpers/__init__.py b/google/cloud/aiplatform/helpers/__init__.py index 3f031f2bb4..e5fa8f665d 100644 --- a/google/cloud/aiplatform/helpers/__init__.py +++ b/google/cloud/aiplatform/helpers/__init__.py @@ -1,3 +1,21 @@ -from google.cloud.aiplatform.helpers import value_converter +# Copyright 2021 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. -__all__ = (value_converter,) +from google.cloud.aiplatform.helpers import container_uri_builders + +get_prebuilt_prediction_container_uri = ( + container_uri_builders.get_prebuilt_prediction_container_uri +) + +__all__ = ("get_prebuilt_prediction_container_uri",) diff --git a/google/cloud/aiplatform/helpers/container_uri_builders.py b/google/cloud/aiplatform/helpers/container_uri_builders.py new file mode 100644 index 0000000000..6b49d3e230 --- /dev/null +++ b/google/cloud/aiplatform/helpers/container_uri_builders.py @@ -0,0 +1,109 @@ +# Copyright 2021 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from typing import Optional + +from google.cloud.aiplatform.constants import prediction +from google.cloud.aiplatform import initializer + + +def get_prebuilt_prediction_container_uri( + framework: str, + framework_version: str, + region: Optional[str] = None, + accelerator: str = "cpu", +) -> str: + """ + Get a Vertex AI pre-built prediction Docker container URI for + a given framework, version, region, and accelerator use.
+ + Example usage: + ``` + uri = aiplatform.helpers.get_prebuilt_prediction_container_uri( + framework="tensorflow", + framework_version="2.6", + accelerator="gpu" + ) + + model = aiplatform.Model.upload( + display_name="boston_housing_", + artifact_uri="gs://my-bucket/my-model/", + serving_container_image_uri=uri + ) + ``` + + Args: + framework (str): + Required. The ML framework of the pre-built container. For example, + `"tensorflow"`, `"xgboost"`, or `"sklearn"` + framework_version (str): + Required. The version of the specified ML framework as a string. + region (str): + Optional. AI region or multi-region. Used to select the correct + Artifact Registry multi-region repository and reduce latency. + Must start with `"us"`, `"asia"` or `"europe"`. + Default is location set by `aiplatform.init()`. + accelerator (str): + Optional. The type of accelerator support provided by container. For + example: `"cpu"` or `"gpu"` + Default is `"cpu"`. + + Returns: + uri (str): + A Vertex AI prediction container URI + + Raises: + ValueError: If containers for provided framework are unavailable or the + container does not support the specified version, accelerator, or region. + """ + URI_MAP = prediction._SERVING_CONTAINER_URI_MAP + DOCS_URI_MESSAGE = ( + f"See {prediction._SERVING_CONTAINER_DOCUMENTATION_URL} " + "for complete list of supported containers" + ) + + # If region not provided, use initializer location + region = region or initializer.global_config.location + region = region.split("-", 1)[0] + framework = framework.lower() + + if not URI_MAP.get(region): + raise ValueError( + f"Unsupported container region `{region}`, supported regions are " + f"{', '.join(URI_MAP.keys())}. " + f"{DOCS_URI_MESSAGE}" + ) + + if not URI_MAP[region].get(framework): + raise ValueError( + f"No containers found for framework `{framework}`. 
Supported frameworks are " + f"{', '.join(URI_MAP[region].keys())} {DOCS_URI_MESSAGE}" + ) + + if not URI_MAP[region][framework].get(accelerator): + raise ValueError( + f"{framework} containers do not support `{accelerator}` accelerator. Supported accelerators " + f"are {', '.join(URI_MAP[region][framework].keys())}. {DOCS_URI_MESSAGE}" + ) + + final_uri = URI_MAP[region][framework][accelerator].get(framework_version) + + if not final_uri: + raise ValueError( + f"No serving container for `{framework}` version `{framework_version}` " + f"with accelerator `{accelerator}` found. Supported versions " + f"include {', '.join(URI_MAP[region][framework][accelerator].keys())}. {DOCS_URI_MESSAGE}" + ) + + return final_uri diff --git a/google/cloud/aiplatform/initializer.py b/google/cloud/aiplatform/initializer.py index ea1a51c8a7..00f6b19b40 100644 --- a/google/cloud/aiplatform/initializer.py +++ b/google/cloud/aiplatform/initializer.py @@ -29,7 +29,7 @@ from google.auth.exceptions import GoogleAuthError from google.cloud.aiplatform import compat -from google.cloud.aiplatform import constants +from google.cloud.aiplatform.constants import base as constants from google.cloud.aiplatform import utils from google.cloud.aiplatform.metadata import metadata diff --git a/google/cloud/aiplatform/jobs.py b/google/cloud/aiplatform/jobs.py index eb593c70bf..57958fc779 100644 --- a/google/cloud/aiplatform/jobs.py +++ b/google/cloud/aiplatform/jobs.py @@ -42,7 +42,7 @@ machine_resources as gca_machine_resources_compat, study as gca_study_compat, ) -from google.cloud.aiplatform import constants +from google.cloud.aiplatform.constants import base as constants from google.cloud.aiplatform import initializer from google.cloud.aiplatform import hyperparameter_tuning from google.cloud.aiplatform import utils diff --git a/google/cloud/aiplatform/tensorboard/uploader_main.py b/google/cloud/aiplatform/tensorboard/uploader_main.py index 304e46cfb7..ba259d6388 100644 --- 
a/google/cloud/aiplatform/tensorboard/uploader_main.py +++ b/google/cloud/aiplatform/tensorboard/uploader_main.py @@ -31,6 +31,7 @@ from google.api_core import exceptions from google.cloud import storage from google.cloud import aiplatform +from google.cloud.aiplatform.constants import base as constants from google.cloud.aiplatform import jobs from google.cloud.aiplatform.tensorboard import uploader from google.cloud.aiplatform.utils import TensorboardClientWithOverride @@ -91,7 +92,7 @@ def main(argv): if len(argv) > 1: raise app.UsageError("Too many command-line arguments.") - aiplatform.constants.API_BASE_PATH = FLAGS.api_uri + constants.API_BASE_PATH = FLAGS.api_uri m = re.match( "projects/(.*)/locations/(.*)/tensorboards/.*", FLAGS.tensorboard_resource_name ) diff --git a/google/cloud/aiplatform/training_jobs.py b/google/cloud/aiplatform/training_jobs.py index 679bb277ab..4afd4920db 100644 --- a/google/cloud/aiplatform/training_jobs.py +++ b/google/cloud/aiplatform/training_jobs.py @@ -23,7 +23,7 @@ from google.auth import credentials as auth_credentials from google.cloud.aiplatform import base -from google.cloud.aiplatform import constants +from google.cloud.aiplatform.constants import base as constants from google.cloud.aiplatform import datasets from google.cloud.aiplatform import initializer from google.cloud.aiplatform import models diff --git a/google/cloud/aiplatform/utils/__init__.py b/google/cloud/aiplatform/utils/__init__.py index 379ebfc179..7d49d57c1e 100644 --- a/google/cloud/aiplatform/utils/__init__.py +++ b/google/cloud/aiplatform/utils/__init__.py @@ -30,7 +30,7 @@ from google.cloud import storage from google.cloud.aiplatform import compat -from google.cloud.aiplatform import constants +from google.cloud.aiplatform.constants import base as constants from google.cloud.aiplatform import initializer from google.cloud.aiplatform.compat.services import ( diff --git a/google/cloud/aiplatform/utils/enhanced_library/__init__.py 
b/google/cloud/aiplatform/utils/enhanced_library/__init__.py new file mode 100644 index 0000000000..7e1ec16ec8 --- /dev/null +++ b/google/cloud/aiplatform/utils/enhanced_library/__init__.py @@ -0,0 +1,13 @@ +# Copyright 2021 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. diff --git a/google/cloud/aiplatform/helpers/_decorators.py b/google/cloud/aiplatform/utils/enhanced_library/_decorators.py similarity index 97% rename from google/cloud/aiplatform/helpers/_decorators.py rename to google/cloud/aiplatform/utils/enhanced_library/_decorators.py index 95aac31c4f..43e395393b 100644 --- a/google/cloud/aiplatform/helpers/_decorators.py +++ b/google/cloud/aiplatform/utils/enhanced_library/_decorators.py @@ -12,7 +12,7 @@ # See the License for the specific language governing permissions and # limitations under the License. 
from __future__ import absolute_import -from google.cloud.aiplatform.helpers import value_converter +from google.cloud.aiplatform.utils.enhanced_library import value_converter from proto.marshal import Marshal from proto.marshal.rules.struct import ValueRule diff --git a/google/cloud/aiplatform/helpers/value_converter.py b/google/cloud/aiplatform/utils/enhanced_library/value_converter.py similarity index 100% rename from google/cloud/aiplatform/helpers/value_converter.py rename to google/cloud/aiplatform/utils/enhanced_library/value_converter.py diff --git a/tests/unit/aiplatform/test_datasets.py b/tests/unit/aiplatform/test_datasets.py index f80beac240..71ca5907ab 100644 --- a/tests/unit/aiplatform/test_datasets.py +++ b/tests/unit/aiplatform/test_datasets.py @@ -30,6 +30,7 @@ from google.cloud import aiplatform from google.cloud.aiplatform import base from google.cloud.aiplatform import compat +from google.cloud.aiplatform.constants import base as constants from google.cloud.aiplatform import datasets from google.cloud.aiplatform import initializer from google.cloud.aiplatform import schema @@ -514,7 +515,7 @@ def test_init_dataset_with_alt_location(self, get_dataset_tabular_gcs_mock): assert ( ds.api_client._clients[compat.DEFAULT_VERSION]._client_options.api_endpoint - == f"{_TEST_LOCATION}-{aiplatform.constants.API_BASE_PATH}" + == f"{_TEST_LOCATION}-{constants.API_BASE_PATH}" ) assert _TEST_ALT_LOCATION != _TEST_LOCATION diff --git a/tests/unit/aiplatform/test_helpers.py b/tests/unit/aiplatform/test_helpers.py new file mode 100644 index 0000000000..4d9afe6fa5 --- /dev/null +++ b/tests/unit/aiplatform/test_helpers.py @@ -0,0 +1,149 @@ +# -*- coding: utf-8 -*- + +# Copyright 2021 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +import importlib +import pytest + +from typing import Sequence + +from google.cloud import aiplatform +from google.cloud.aiplatform import helpers +from google.cloud.aiplatform import initializer + + +class TestContainerUriHelpers: + def setup_method(self): + importlib.reload(initializer) + importlib.reload(aiplatform) + + def teardown_method(self): + initializer.global_pool.shutdown(wait=True) + + def _build_predict_uri_kwargs(self, args: Sequence[str]) -> dict: + """ + Takes list of values for all method parameters and return dict of kwargs, + dropping keywords that were set as None. 
+ """ + func = helpers.get_prebuilt_prediction_container_uri + arg_names = func.__code__.co_varnames[: func.__code__.co_argcount] + return {k: v for k, v in dict(zip(arg_names, args)).items() if v is not None} + + @pytest.mark.parametrize( + "args, expected_uri", + [ + ( + ("tensorflow", "2.6", None, None), + "us-docker.pkg.dev/vertex-ai/prediction/tf2-cpu.2-6:latest", + ), + ( + ("tensorflow", "1.15", "europe-west4", None), + "europe-docker.pkg.dev/vertex-ai/prediction/tf-cpu.1-15:latest", + ), + ( + ("tensorflow", "2.2", None, "gpu"), + "us-docker.pkg.dev/vertex-ai/prediction/tf2-gpu.2-2:latest", + ), + ( + ("sklearn", "0.24", "asia", "cpu"), + "asia-docker.pkg.dev/vertex-ai/prediction/sklearn-cpu.0-24:latest", + ), + ( + ("sklearn", "0.20", None, None), + "us-docker.pkg.dev/vertex-ai/prediction/sklearn-cpu.0-20:latest", + ), + ( + ("xgboost", "1.3", None, None), + "us-docker.pkg.dev/vertex-ai/prediction/xgboost-cpu.1-3:latest", + ), + ( + ("xgboost", "0.90", "europe", None), + "europe-docker.pkg.dev/vertex-ai/prediction/xgboost-cpu.0-90:latest", + ), + ], + ) + def test_correct_prediction_uri_args(self, args, expected_uri): + uri = helpers.get_prebuilt_prediction_container_uri( + **self._build_predict_uri_kwargs(args) + ) + + assert uri == expected_uri + + def test_correct_prediction_uri_args_with_init_location(self): + """ + Ensure that aiplatform.init location is used when region + is not provided + """ + uri = aiplatform.helpers.get_prebuilt_prediction_container_uri( + "tensorflow", "2.6" + ) + # SDK default location is us-central1 + assert uri.startswith("us-docker.pkg.dev") + + aiplatform.init(location="asia-northeast3") + uri = aiplatform.helpers.get_prebuilt_prediction_container_uri( + "tensorflow", "2.6" + ) + assert uri.startswith("asia-docker.pkg.dev") + + aiplatform.init(location="europe-west2") + uri = aiplatform.helpers.get_prebuilt_prediction_container_uri( + "xgboost", "0.90" + ) + assert uri.startswith("europe-docker.pkg.dev") + + 
@pytest.mark.parametrize( + "args, expected_error_msg", + [ + ( + ("pytorch", "1.10", None, None), + "No containers found for framework `pytorch`. Supported frameworks are", + ), + ( + ("tensorflow", "9.15", None, None), + ( + "No serving container for `tensorflow` version `9.15` with accelerator " + "`cpu` found. Supported versions include" + ), + ), + ( + # Make sure region error supersedes version error + ("tensorflow", "9.15", "pluto", None), + "Unsupported container region `pluto`, supported regions are ", + ), + ( + ("tensorflow", "2.2", "narnia", None), + "Unsupported container region `narnia`, supported regions are ", + ), + ( + ("sklearn", "0.24", "asia", "gpu"), + "sklearn containers do not support `gpu` accelerator. Supported accelerators are cpu.", + ), + ( + # Make sure framework error supersedes accelerator error + ("onnx", "1.9", None, "gpu"), + "No containers found for framework `onnx`. Supported frameworks are", + ), + ], + ) + def test_invalid_prediction_uri_args(self, args, expected_error_msg): + + with pytest.raises(ValueError) as err: + helpers.get_prebuilt_prediction_container_uri( + **self._build_predict_uri_kwargs(args) + ) + + assert err.match(expected_error_msg) diff --git a/tests/unit/aiplatform/test_initializer.py b/tests/unit/aiplatform/test_initializer.py index 7e65c99b4c..f4043a5eba 100644 --- a/tests/unit/aiplatform/test_initializer.py +++ b/tests/unit/aiplatform/test_initializer.py @@ -26,7 +26,7 @@ from google.cloud.aiplatform import initializer from google.cloud.aiplatform.metadata.metadata import metadata_service -from google.cloud.aiplatform import constants +from google.cloud.aiplatform.constants import base as constants from google.cloud.aiplatform import utils from google.cloud.aiplatform_v1.services.model_service import ( diff --git a/tests/unit/enhanced_library/test_value_converter.py b/tests/unit/enhanced_library/test_value_converter.py index 4cbaeed7cf..4bb24372e2 100644 ---
a/tests/unit/enhanced_library/test_value_converter.py +++ b/tests/unit/enhanced_library/test_value_converter.py @@ -13,7 +13,7 @@ # limitations under the License. from __future__ import absolute_import -from google.cloud.aiplatform.helpers import value_converter +from google.cloud.aiplatform.utils.enhanced_library import value_converter from google.protobuf import json_format from google.protobuf.struct_pb2 import Value import proto