feat: Add prediction container URI builder method (#805)
* Initial container URI builder

* Address minor requested changes

* Move enhanced_library from helpers to utils

* Update logic of uri helper to use programmatic map

* Split aiplatform.constants into multiple modules

* Add tests for 1P pred container URI helper, fixes

* Address final requested changes, update OWNERS

* Add newest prediction containers
vinnysenthil committed Nov 24, 2021
1 parent 004bf5f commit 91dd3c0
Showing 20 changed files with 464 additions and 12 deletions.
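For orientation, a minimal sketch of how the new helper is meant to be called, mirroring the usage example in the docstring of `google/cloud/aiplatform/helpers/container_uri_builders.py` below; the display name and bucket path are placeholders, and the resolved URI in the comment assumes the initializer location falls in the `us` multi-region.

```
from google.cloud import aiplatform

# Resolve a pre-built Vertex AI prediction container for TensorFlow 2.6 on GPU.
uri = aiplatform.helpers.get_prebuilt_prediction_container_uri(
    framework="tensorflow",
    framework_version="2.6",
    accelerator="gpu",
)
# With a "us" location this resolves to:
# "us-docker.pkg.dev/vertex-ai/prediction/tf2-gpu.2-6:latest"

# The URI is then passed to Model.upload as the serving container image.
model = aiplatform.Model.upload(
    display_name="my-model",                  # placeholder
    artifact_uri="gs://my-bucket/my-model/",  # placeholder
    serving_container_image_uri=uri,
)
```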
3 changes: 3 additions & 0 deletions .github/CODEOWNERS
@@ -26,3 +26,6 @@
CHANGELOG.md @googleapis/cloud-aiplatform-model-builder-sdk @googleapis/cdpe-cloudai
README.rst @googleapis/cloud-aiplatform-model-builder-sdk @googleapis/cdpe-cloudai
setup.py @googleapis/cloud-aiplatform-model-builder-sdk @googleapis/cdpe-cloudai

# Vertex AI product team-specific ownership
/google/cloud/aiplatform/constants/prediction.py @googleapis/vertex-prediction-team
2 changes: 2 additions & 0 deletions google/cloud/aiplatform/__init__.py
@@ -53,6 +53,7 @@
AutoMLTextTrainingJob,
AutoMLVideoTrainingJob,
)
from google.cloud.aiplatform import helpers

"""
Usage:
@@ -73,6 +74,7 @@
"explain",
"gapic",
"init",
"helpers",
"hyperparameter_tuning",
"log_params",
"log_metrics",
18 changes: 18 additions & 0 deletions google/cloud/aiplatform/constants/__init__.py
@@ -0,0 +1,18 @@
# Copyright 2021 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

from google.cloud.aiplatform.constants import base
from google.cloud.aiplatform.constants import prediction

__all__ = ("base", "prediction")
File renamed without changes.
138 changes: 138 additions & 0 deletions google/cloud/aiplatform/constants/prediction.py
@@ -0,0 +1,138 @@
# Copyright 2021 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

import re

from collections import defaultdict

# [region]-docker.pkg.dev/vertex-ai/prediction/[framework]-[accelerator].[version]:latest
CONTAINER_URI_PATTERN = re.compile(
r"(?P<region>[\w]+)\-docker\.pkg\.dev\/vertex\-ai\/prediction\/"
r"(?P<framework>[\w]+)\-(?P<accelerator>[\w]+)\.(?P<version>[\d-]+):latest"
)

SKLEARN = "sklearn"
TF = "tf"
TF2 = "tf2"
XGBOOST = "xgboost"

XGBOOST_CONTAINER_URIS = [
"us-docker.pkg.dev/vertex-ai/prediction/xgboost-cpu.1-4:latest",
"europe-docker.pkg.dev/vertex-ai/prediction/xgboost-cpu.1-4:latest",
"asia-docker.pkg.dev/vertex-ai/prediction/xgboost-cpu.1-4:latest",
"us-docker.pkg.dev/vertex-ai/prediction/xgboost-cpu.1-3:latest",
"europe-docker.pkg.dev/vertex-ai/prediction/xgboost-cpu.1-3:latest",
"asia-docker.pkg.dev/vertex-ai/prediction/xgboost-cpu.1-3:latest",
"us-docker.pkg.dev/vertex-ai/prediction/xgboost-cpu.1-2:latest",
"europe-docker.pkg.dev/vertex-ai/prediction/xgboost-cpu.1-2:latest",
"asia-docker.pkg.dev/vertex-ai/prediction/xgboost-cpu.1-2:latest",
"us-docker.pkg.dev/vertex-ai/prediction/xgboost-cpu.1-1:latest",
"europe-docker.pkg.dev/vertex-ai/prediction/xgboost-cpu.1-1:latest",
"asia-docker.pkg.dev/vertex-ai/prediction/xgboost-cpu.1-1:latest",
"us-docker.pkg.dev/vertex-ai/prediction/xgboost-cpu.0-90:latest",
"europe-docker.pkg.dev/vertex-ai/prediction/xgboost-cpu.0-90:latest",
"asia-docker.pkg.dev/vertex-ai/prediction/xgboost-cpu.0-90:latest",
"us-docker.pkg.dev/vertex-ai/prediction/xgboost-cpu.0-82:latest",
"europe-docker.pkg.dev/vertex-ai/prediction/xgboost-cpu.0-82:latest",
"asia-docker.pkg.dev/vertex-ai/prediction/xgboost-cpu.0-82:latest",
]

SKLEARN_CONTAINER_URIS = [
"us-docker.pkg.dev/vertex-ai/prediction/sklearn-cpu.1-0:latest",
"europe-docker.pkg.dev/vertex-ai/prediction/sklearn-cpu.1-0:latest",
"asia-docker.pkg.dev/vertex-ai/prediction/sklearn-cpu.1-0:latest",
"us-docker.pkg.dev/vertex-ai/prediction/sklearn-cpu.0-24:latest",
"europe-docker.pkg.dev/vertex-ai/prediction/sklearn-cpu.0-24:latest",
"asia-docker.pkg.dev/vertex-ai/prediction/sklearn-cpu.0-24:latest",
"us-docker.pkg.dev/vertex-ai/prediction/sklearn-cpu.0-23:latest",
"europe-docker.pkg.dev/vertex-ai/prediction/sklearn-cpu.0-23:latest",
"asia-docker.pkg.dev/vertex-ai/prediction/sklearn-cpu.0-23:latest",
"us-docker.pkg.dev/vertex-ai/prediction/sklearn-cpu.0-22:latest",
"europe-docker.pkg.dev/vertex-ai/prediction/sklearn-cpu.0-22:latest",
"asia-docker.pkg.dev/vertex-ai/prediction/sklearn-cpu.0-22:latest",
"us-docker.pkg.dev/vertex-ai/prediction/sklearn-cpu.0-20:latest",
"europe-docker.pkg.dev/vertex-ai/prediction/sklearn-cpu.0-20:latest",
"asia-docker.pkg.dev/vertex-ai/prediction/sklearn-cpu.0-20:latest",
]

TF_CONTAINER_URIS = [
"us-docker.pkg.dev/vertex-ai/prediction/tf2-cpu.2-7:latest",
"europe-docker.pkg.dev/vertex-ai/prediction/tf2-cpu.2-7:latest",
"asia-docker.pkg.dev/vertex-ai/prediction/tf2-cpu.2-7:latest",
"us-docker.pkg.dev/vertex-ai/prediction/tf2-gpu.2-7:latest",
"europe-docker.pkg.dev/vertex-ai/prediction/tf2-gpu.2-7:latest",
"asia-docker.pkg.dev/vertex-ai/prediction/tf2-gpu.2-7:latest",
"us-docker.pkg.dev/vertex-ai/prediction/tf2-cpu.2-6:latest",
"europe-docker.pkg.dev/vertex-ai/prediction/tf2-cpu.2-6:latest",
"asia-docker.pkg.dev/vertex-ai/prediction/tf2-cpu.2-6:latest",
"us-docker.pkg.dev/vertex-ai/prediction/tf2-gpu.2-6:latest",
"europe-docker.pkg.dev/vertex-ai/prediction/tf2-gpu.2-6:latest",
"asia-docker.pkg.dev/vertex-ai/prediction/tf2-gpu.2-6:latest",
"us-docker.pkg.dev/vertex-ai/prediction/tf2-cpu.2-5:latest",
"europe-docker.pkg.dev/vertex-ai/prediction/tf2-cpu.2-5:latest",
"asia-docker.pkg.dev/vertex-ai/prediction/tf2-cpu.2-5:latest",
"us-docker.pkg.dev/vertex-ai/prediction/tf2-gpu.2-5:latest",
"europe-docker.pkg.dev/vertex-ai/prediction/tf2-gpu.2-5:latest",
"asia-docker.pkg.dev/vertex-ai/prediction/tf2-gpu.2-5:latest",
"us-docker.pkg.dev/vertex-ai/prediction/tf2-cpu.2-4:latest",
"europe-docker.pkg.dev/vertex-ai/prediction/tf2-cpu.2-4:latest",
"asia-docker.pkg.dev/vertex-ai/prediction/tf2-cpu.2-4:latest",
"us-docker.pkg.dev/vertex-ai/prediction/tf2-gpu.2-4:latest",
"europe-docker.pkg.dev/vertex-ai/prediction/tf2-gpu.2-4:latest",
"asia-docker.pkg.dev/vertex-ai/prediction/tf2-gpu.2-4:latest",
"us-docker.pkg.dev/vertex-ai/prediction/tf2-cpu.2-3:latest",
"europe-docker.pkg.dev/vertex-ai/prediction/tf2-cpu.2-3:latest",
"asia-docker.pkg.dev/vertex-ai/prediction/tf2-cpu.2-3:latest",
"us-docker.pkg.dev/vertex-ai/prediction/tf2-gpu.2-3:latest",
"europe-docker.pkg.dev/vertex-ai/prediction/tf2-gpu.2-3:latest",
"asia-docker.pkg.dev/vertex-ai/prediction/tf2-gpu.2-3:latest",
"us-docker.pkg.dev/vertex-ai/prediction/tf2-cpu.2-2:latest",
"europe-docker.pkg.dev/vertex-ai/prediction/tf2-cpu.2-2:latest",
"asia-docker.pkg.dev/vertex-ai/prediction/tf2-cpu.2-2:latest",
"us-docker.pkg.dev/vertex-ai/prediction/tf2-gpu.2-2:latest",
"europe-docker.pkg.dev/vertex-ai/prediction/tf2-gpu.2-2:latest",
"asia-docker.pkg.dev/vertex-ai/prediction/tf2-gpu.2-2:latest",
"us-docker.pkg.dev/vertex-ai/prediction/tf2-cpu.2-1:latest",
"europe-docker.pkg.dev/vertex-ai/prediction/tf2-cpu.2-1:latest",
"asia-docker.pkg.dev/vertex-ai/prediction/tf2-cpu.2-1:latest",
"us-docker.pkg.dev/vertex-ai/prediction/tf-cpu.1-15:latest",
"europe-docker.pkg.dev/vertex-ai/prediction/tf-cpu.1-15:latest",
"asia-docker.pkg.dev/vertex-ai/prediction/tf-cpu.1-15:latest",
"us-docker.pkg.dev/vertex-ai/prediction/tf-gpu.1-15:latest",
"europe-docker.pkg.dev/vertex-ai/prediction/tf-gpu.1-15:latest",
"asia-docker.pkg.dev/vertex-ai/prediction/tf-gpu.1-15:latest",
]

SERVING_CONTAINER_URIS = (
SKLEARN_CONTAINER_URIS + TF_CONTAINER_URIS + XGBOOST_CONTAINER_URIS
)

# Map of all first-party prediction containers
d = defaultdict(lambda: defaultdict(lambda: defaultdict(lambda: defaultdict(str))))

for container_uri in SERVING_CONTAINER_URIS:
m = CONTAINER_URI_PATTERN.match(container_uri)
region, framework, accelerator, version = m[1], m[2], m[3], m[4]
version = version.replace("-", ".")

if framework in (TF2, TF): # Store both `tf`, `tf2` as `tensorflow`
framework = "tensorflow"

d[region][framework][accelerator][version] = container_uri

_SERVING_CONTAINER_URI_MAP = d

_SERVING_CONTAINER_DOCUMENTATION_URL = (
"https://cloud.google.com/vertex-ai/docs/predictions/pre-built-containers"
)
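To make the nested map concrete, here is a small illustrative lookup (not part of the commit): the map is keyed region → framework → accelerator → version, `tf`/`tf2` URIs are both stored under `tensorflow`, and the hyphenated version in the URI is converted to a dotted version key.

```
from google.cloud.aiplatform.constants import prediction

# region -> framework -> accelerator -> version -> container URI
uri = prediction._SERVING_CONTAINER_URI_MAP["us"]["tensorflow"]["gpu"]["2.6"]
assert uri == "us-docker.pkg.dev/vertex-ai/prediction/tf2-gpu.2-6:latest"
```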
2 changes: 1 addition & 1 deletion google/cloud/aiplatform/gapic/schema/__init__.py
@@ -14,7 +14,7 @@
# See the License for the specific language governing permissions and
# limitations under the License.

from google.cloud.aiplatform.helpers import _decorators
from google.cloud.aiplatform.utils.enhanced_library import _decorators
from google.cloud.aiplatform.v1.schema import predict
from google.cloud.aiplatform.v1.schema import trainingjob
from google.cloud.aiplatform.v1beta1.schema import predict as predict_v1beta1
22 changes: 20 additions & 2 deletions google/cloud/aiplatform/helpers/__init__.py
@@ -1,3 +1,21 @@
from google.cloud.aiplatform.helpers import value_converter
# Copyright 2021 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

__all__ = (value_converter,)
from google.cloud.aiplatform.helpers import container_uri_builders

get_prebuilt_prediction_container_uri = (
container_uri_builders.get_prebuilt_prediction_container_uri
)

__all__ = ("get_prebuilt_prediction_container_uri",)
109 changes: 109 additions & 0 deletions google/cloud/aiplatform/helpers/container_uri_builders.py
@@ -0,0 +1,109 @@
# Copyright 2021 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

from typing import Optional

from google.cloud.aiplatform.constants import prediction
from google.cloud.aiplatform import initializer


def get_prebuilt_prediction_container_uri(
framework: str,
framework_version: str,
region: Optional[str] = None,
accelerator: str = "cpu",
) -> str:
"""
Get a Vertex AI pre-built prediction Docker container URI for
a given framework, version, region, and accelerator use.
Example usage:
```
uri = aiplatform.helpers.get_prebuilt_prediction_container_uri(
framework="tensorflow",
framework_version="2.6",
accelerator="gpu"
)
model = aiplatform.Model.upload(
display_name="boston_housing_",
artifact_uri="gs://my-bucket/my-model/",
serving_container_image_uri=uri
)
```
Args:
framework (str):
Required. The ML framework of the pre-built container. For example,
`"tensorflow"`, `"xgboost"`, or `"sklearn"`.
framework_version (str):
Required. The version of the specified ML framework as a string.
region (str):
Optional. Vertex AI region or multi-region. Used to select the correct
Artifact Registry multi-region repository and reduce latency.
Must start with `"us"`, `"asia"` or `"europe"`.
Default is the location set by `aiplatform.init()`.
accelerator (str):
Optional. The type of accelerator support provided by the container. For
example: `"cpu"` or `"gpu"`.
Default is `"cpu"`.
Returns:
uri (str):
A Vertex AI prediction container URI
Raises:
ValueError: If containers for provided framework are unavailable or the
container does not support the specified version, accelerator, or region.
"""
URI_MAP = prediction._SERVING_CONTAINER_URI_MAP
DOCS_URI_MESSAGE = (
f"See {prediction._SERVING_CONTAINER_DOCUMENTATION_URL} "
"for complete list of supported containers"
)

# If region not provided, use initializer location
region = region or initializer.global_config.location
region = region.split("-", 1)[0]
framework = framework.lower()

if not URI_MAP.get(region):
raise ValueError(
f"Unsupported container region `{region}`, supported regions are "
f"{', '.join(URI_MAP.keys())}. "
f"{DOCS_URI_MESSAGE}"
)

if not URI_MAP[region].get(framework):
raise ValueError(
f"No containers found for framework `{framework}`. Supported frameworks are "
f"{', '.join(URI_MAP[region].keys())} {DOCS_URI_MESSAGE}"
)

if not URI_MAP[region][framework].get(accelerator):
raise ValueError(
f"{framework} containers do not support `{accelerator}` accelerator. Supported accelerators "
f"are {', '.join(URI_MAP[region][framework].keys())}. {DOCS_URI_MESSAGE}"
)

final_uri = URI_MAP[region][framework][accelerator].get(framework_version)

if not final_uri:
raise ValueError(
f"No serving container for `{framework}` version `{framework_version}` "
f"with accelerator `{accelerator}` found. Supported versions "
f"include {', '.join(URI_MAP[region][framework][accelerator].keys())}. {DOCS_URI_MESSAGE}"
)

return final_uri
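As an illustration of the error path (a sketch based on the checks above, not code from the commit): a regional location such as `us-central1` is collapsed to its `us` multi-region prefix, and a framework with no pre-built prediction container, e.g. `pytorch`, raises a ValueError listing the supported frameworks and linking to the pre-built containers documentation.

```
from google.cloud.aiplatform import helpers

try:
    helpers.get_prebuilt_prediction_container_uri(
        framework="pytorch",       # no 1P prediction container for this framework
        framework_version="1.10",
        region="us-central1",      # collapsed to the "us" multi-region internally
    )
except ValueError as err:
    # e.g. "No containers found for framework `pytorch`. Supported frameworks are ..."
    print(err)
```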
2 changes: 1 addition & 1 deletion google/cloud/aiplatform/initializer.py
@@ -29,7 +29,7 @@
from google.auth.exceptions import GoogleAuthError

from google.cloud.aiplatform import compat
from google.cloud.aiplatform import constants
from google.cloud.aiplatform.constants import base as constants
from google.cloud.aiplatform import utils
from google.cloud.aiplatform.metadata import metadata

2 changes: 1 addition & 1 deletion google/cloud/aiplatform/jobs.py
@@ -42,7 +42,7 @@
machine_resources as gca_machine_resources_compat,
study as gca_study_compat,
)
from google.cloud.aiplatform import constants
from google.cloud.aiplatform.constants import base as constants
from google.cloud.aiplatform import initializer
from google.cloud.aiplatform import hyperparameter_tuning
from google.cloud.aiplatform import utils
3 changes: 2 additions & 1 deletion google/cloud/aiplatform/tensorboard/uploader_main.py
@@ -31,6 +31,7 @@
from google.api_core import exceptions
from google.cloud import storage
from google.cloud import aiplatform
from google.cloud.aiplatform.constants import base as constants
from google.cloud.aiplatform import jobs
from google.cloud.aiplatform.tensorboard import uploader
from google.cloud.aiplatform.utils import TensorboardClientWithOverride
@@ -91,7 +92,7 @@ def main(argv):
if len(argv) > 1:
raise app.UsageError("Too many command-line arguments.")

aiplatform.constants.API_BASE_PATH = FLAGS.api_uri
constants.API_BASE_PATH = FLAGS.api_uri
m = re.match(
"projects/(.*)/locations/(.*)/tensorboards/.*", FLAGS.tensorboard_resource_name
)
2 changes: 1 addition & 1 deletion google/cloud/aiplatform/training_jobs.py
@@ -23,7 +23,7 @@

from google.auth import credentials as auth_credentials
from google.cloud.aiplatform import base
from google.cloud.aiplatform import constants
from google.cloud.aiplatform.constants import base as constants
from google.cloud.aiplatform import datasets
from google.cloud.aiplatform import initializer
from google.cloud.aiplatform import models
2 changes: 1 addition & 1 deletion google/cloud/aiplatform/utils/__init__.py
@@ -30,7 +30,7 @@
from google.cloud import storage

from google.cloud.aiplatform import compat
from google.cloud.aiplatform import constants
from google.cloud.aiplatform.constants import base as constants
from google.cloud.aiplatform import initializer

from google.cloud.aiplatform.compat.services import (
13 changes: 13 additions & 0 deletions google/cloud/aiplatform/utils/enhanced_library/__init__.py
@@ -0,0 +1,13 @@
# Copyright 2021 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
@@ -12,7 +12,7 @@
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import absolute_import
from google.cloud.aiplatform.helpers import value_converter
from google.cloud.aiplatform.utils.enhanced_library import value_converter

from proto.marshal import Marshal
from proto.marshal.rules.struct import ValueRule
