feat: add prediction service RPC RawPredict to aiplatform_v1beta1 feat: add tensorboard service RPCs to aiplatform_v1beta1: BatchCreateTensorboardRuns, BatchCreateTensorboardTimeSeries, WriteTensorboardExperimentData feat: add model_deployment_monitoring_job to Endpoint in aiplatform_v1beta1 feat: add deployment_group to DeployedIndex in aiplatform_v1beta1 feat: add ModelEvaluationExplanationSpec in aiplatform_v1beta1 (#670)

* feat: add prediction service RPC RawPredict to aiplatform_v1beta1 feat: add tensorboard service RPCs to aiplatform_v1beta1: BatchCreateTensorboardRuns, BatchCreateTensorboardTimeSeries, WriteTensorboardExperimentData feat: add model_deployment_monitoring_job to Endpoint in aiplatform_v1beta1 feat: add deployment_group to DeployedIndex in aiplatform_v1beta1 feat: add ModelEvaluationExplanationSpec in aiplatform_v1beta1
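
For context, a minimal sketch of how the new RawPredict surface can be invoked from the generated v1beta1 client. The project, endpoint ID, and JSON payload are placeholders, and the `HttpBody` wrapper is assumed to come from `google.api.httpbody_pb2` as elsewhere in the GAPIC surface:

```python
from google.api import httpbody_pb2
from google.cloud import aiplatform_v1beta1

client = aiplatform_v1beta1.PredictionServiceClient()

# RawPredict forwards an arbitrary HTTP body to the deployed model and
# returns the model server's raw response, bypassing Vertex AI's usual
# instance/prediction schemas.
request = aiplatform_v1beta1.RawPredictRequest(
    endpoint="projects/my-project/locations/us-central1/endpoints/123",  # placeholder
    http_body=httpbody_pb2.HttpBody(
        content_type="application/json",
        data=b'{"instances": [[1.0, 2.0, 3.0]]}',
    ),
)
response = client.raw_predict(request=request)
print(response.content_type, response.data)
```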

Committer: @dizcology
PiperOrigin-RevId: 393890669

Source-Link: googleapis/googleapis@321abab

Source-Link: googleapis/googleapis-gen@629290d

* 🦉 Updates from OwlBot

See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md

Co-authored-by: Owl Bot <gcf-owl-bot[bot]@users.noreply.github.com>
gcf-owl-bot[bot] committed Aug 31, 2021
1 parent 1fbce55 commit b73cd94
Showing 54 changed files with 2,839 additions and 242 deletions.
14 changes: 14 additions & 0 deletions google/cloud/aiplatform_v1beta1/__init__.py
@@ -376,6 +376,7 @@
 from .types.prediction_service import ExplainResponse
 from .types.prediction_service import PredictRequest
 from .types.prediction_service import PredictResponse
+from .types.prediction_service import RawPredictRequest
 from .types.specialist_pool import SpecialistPool
 from .types.specialist_pool_service import CreateSpecialistPoolOperationMetadata
 from .types.specialist_pool_service import CreateSpecialistPoolRequest
@@ -398,6 +399,10 @@
 from .types.tensorboard_data import TimeSeriesDataPoint
 from .types.tensorboard_experiment import TensorboardExperiment
 from .types.tensorboard_run import TensorboardRun
+from .types.tensorboard_service import BatchCreateTensorboardRunsRequest
+from .types.tensorboard_service import BatchCreateTensorboardRunsResponse
+from .types.tensorboard_service import BatchCreateTensorboardTimeSeriesRequest
+from .types.tensorboard_service import BatchCreateTensorboardTimeSeriesResponse
 from .types.tensorboard_service import CreateTensorboardExperimentRequest
 from .types.tensorboard_service import CreateTensorboardOperationMetadata
 from .types.tensorboard_service import CreateTensorboardRequest
@@ -430,6 +435,8 @@
 from .types.tensorboard_service import UpdateTensorboardRequest
 from .types.tensorboard_service import UpdateTensorboardRunRequest
 from .types.tensorboard_service import UpdateTensorboardTimeSeriesRequest
+from .types.tensorboard_service import WriteTensorboardExperimentDataRequest
+from .types.tensorboard_service import WriteTensorboardExperimentDataResponse
 from .types.tensorboard_service import WriteTensorboardRunDataRequest
 from .types.tensorboard_service import WriteTensorboardRunDataResponse
 from .types.tensorboard_time_series import TensorboardTimeSeries
@@ -503,6 +510,10 @@
     "BatchCreateFeaturesOperationMetadata",
     "BatchCreateFeaturesRequest",
     "BatchCreateFeaturesResponse",
+    "BatchCreateTensorboardRunsRequest",
+    "BatchCreateTensorboardRunsResponse",
+    "BatchCreateTensorboardTimeSeriesRequest",
+    "BatchCreateTensorboardTimeSeriesResponse",
     "BatchDedicatedResources",
     "BatchMigrateResourcesOperationMetadata",
     "BatchMigrateResourcesRequest",
@@ -825,6 +836,7 @@
     "QueryArtifactLineageSubgraphRequest",
     "QueryContextLineageSubgraphRequest",
     "QueryExecutionInputsAndOutputsRequest",
+    "RawPredictRequest",
     "ReadFeatureValuesRequest",
     "ReadFeatureValuesResponse",
     "ReadTensorboardBlobDataRequest",
@@ -906,6 +918,8 @@
     "Value",
     "VizierServiceClient",
     "WorkerPoolSpec",
+    "WriteTensorboardExperimentDataRequest",
+    "WriteTensorboardExperimentDataResponse",
     "WriteTensorboardRunDataRequest",
     "WriteTensorboardRunDataResponse",
     "XraiAttribution",
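With these exports in place, the new request and response types become importable straight from the package root, for example:

```python
from google.cloud.aiplatform_v1beta1 import (
    BatchCreateTensorboardRunsRequest,
    BatchCreateTensorboardTimeSeriesRequest,
    RawPredictRequest,
    WriteTensorboardExperimentDataRequest,
)
```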
40 changes: 40 additions & 0 deletions google/cloud/aiplatform_v1beta1/gapic_metadata.json
@@ -1493,6 +1493,11 @@
             "methods": [
               "predict"
             ]
+          },
+          "RawPredict": {
+            "methods": [
+              "raw_predict"
+            ]
           }
         }
       },
@@ -1508,6 +1513,11 @@
             "methods": [
               "predict"
             ]
+          },
+          "RawPredict": {
+            "methods": [
+              "raw_predict"
+            ]
           }
         }
       }
@@ -1582,6 +1592,16 @@
       "grpc": {
         "libraryClient": "TensorboardServiceClient",
         "rpcs": {
+          "BatchCreateTensorboardRuns": {
+            "methods": [
+              "batch_create_tensorboard_runs"
+            ]
+          },
+          "BatchCreateTensorboardTimeSeries": {
+            "methods": [
+              "batch_create_tensorboard_time_series"
+            ]
+          },
           "CreateTensorboard": {
             "methods": [
               "create_tensorboard"
@@ -1697,6 +1717,11 @@
               "update_tensorboard_time_series"
             ]
           },
+          "WriteTensorboardExperimentData": {
+            "methods": [
+              "write_tensorboard_experiment_data"
+            ]
+          },
           "WriteTensorboardRunData": {
             "methods": [
               "write_tensorboard_run_data"
@@ -1707,6 +1732,16 @@
       "grpc-async": {
         "libraryClient": "TensorboardServiceAsyncClient",
         "rpcs": {
+          "BatchCreateTensorboardRuns": {
+            "methods": [
+              "batch_create_tensorboard_runs"
+            ]
+          },
+          "BatchCreateTensorboardTimeSeries": {
+            "methods": [
+              "batch_create_tensorboard_time_series"
+            ]
+          },
           "CreateTensorboard": {
             "methods": [
               "create_tensorboard"
@@ -1822,6 +1857,11 @@
               "update_tensorboard_time_series"
             ]
           },
+          "WriteTensorboardExperimentData": {
+            "methods": [
+              "write_tensorboard_experiment_data"
+            ]
+          },
           "WriteTensorboardRunData": {
             "methods": [
               "write_tensorboard_run_data"
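The metadata above maps the three new Tensorboard RPCs to snake_case methods on both the sync and async clients. A hedged sketch of the batch-create call follows, with placeholder resource names; the field names (`parent`, `requests`, `tensorboard_run_id`) are taken from the new request types added in this commit:

```python
from google.cloud import aiplatform_v1beta1

client = aiplatform_v1beta1.TensorboardServiceClient()
experiment = (  # placeholder resource name
    "projects/my-project/locations/us-central1/tensorboards/123/experiments/exp"
)

# One round trip creates several runs instead of issuing one
# CreateTensorboardRun call per run.
response = client.batch_create_tensorboard_runs(
    request=aiplatform_v1beta1.BatchCreateTensorboardRunsRequest(
        parent=experiment,
        requests=[
            aiplatform_v1beta1.CreateTensorboardRunRequest(
                parent=experiment,
                tensorboard_run=aiplatform_v1beta1.TensorboardRun(display_name=run_id),
                tensorboard_run_id=run_id,
            )
            for run_id in ("run-a", "run-b")
        ],
    )
)
for run in response.tensorboard_runs:
    print(run.name)
```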
google/cloud/aiplatform_v1beta1/services/dataset_service/async_client.py
@@ -47,7 +47,9 @@
 
 
 class DatasetServiceAsyncClient:
-    """"""
+    """The service that handles the CRUD of Vertex AI Dataset and
+    its child resources.
+    """
 
     _client: DatasetServiceClient
 
google/cloud/aiplatform_v1beta1/services/dataset_service/client.py
@@ -84,7 +84,9 @@ def get_transport_class(cls, label: str = None,) -> Type[DatasetServiceTransport]:
 
 
 class DatasetServiceClient(metaclass=DatasetServiceClientMeta):
-    """"""
+    """The service that handles the CRUD of Vertex AI Dataset and
+    its child resources.
+    """
 
     @staticmethod
     def _get_default_mtls_endpoint(api_endpoint):
google/cloud/aiplatform_v1beta1/services/dataset_service/transports/grpc.py
@@ -36,6 +36,9 @@
 class DatasetServiceGrpcTransport(DatasetServiceTransport):
     """gRPC backend transport for DatasetService.
 
+    The service that handles the CRUD of Vertex AI Dataset and
+    its child resources.
+
     This class defines the same methods as the primary client, so the
     primary client can load the underlying transport implementation
     and call it.
google/cloud/aiplatform_v1beta1/services/dataset_service/transports/grpc_asyncio.py
@@ -38,6 +38,9 @@
 class DatasetServiceGrpcAsyncIOTransport(DatasetServiceTransport):
     """gRPC AsyncIO backend transport for DatasetService.
 
+    The service that handles the CRUD of Vertex AI Dataset and
+    its child resources.
+
     This class defines the same methods as the primary client, so the
     primary client can load the underlying transport implementation
     and call it.
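The transport docstrings above describe the layering: the client resolves a transport class and delegates every RPC to it. A small sketch of selecting the transport explicitly, assuming application-default credentials are configured:

```python
from google.cloud.aiplatform_v1beta1.services.dataset_service import (
    DatasetServiceClient,
)

# Passing a transport name makes the client construct that backend itself;
# omitting it falls back to the default ("grpc").
client = DatasetServiceClient(transport="grpc")
```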
google/cloud/aiplatform_v1beta1/services/endpoint_service/async_client.py
@@ -43,7 +43,7 @@
 
 
 class EndpointServiceAsyncClient:
-    """"""
+    """A service for managing Vertex AI's Endpoints."""
 
     _client: EndpointServiceClient
 
@@ -54,6 +54,12 @@ class EndpointServiceAsyncClient:
     parse_endpoint_path = staticmethod(EndpointServiceClient.parse_endpoint_path)
     model_path = staticmethod(EndpointServiceClient.model_path)
     parse_model_path = staticmethod(EndpointServiceClient.parse_model_path)
+    model_deployment_monitoring_job_path = staticmethod(
+        EndpointServiceClient.model_deployment_monitoring_job_path
+    )
+    parse_model_deployment_monitoring_job_path = staticmethod(
+        EndpointServiceClient.parse_model_deployment_monitoring_job_path
+    )
     network_path = staticmethod(EndpointServiceClient.network_path)
     parse_network_path = staticmethod(EndpointServiceClient.parse_network_path)
     common_billing_account_path = staticmethod(
google/cloud/aiplatform_v1beta1/services/endpoint_service/client.py
@@ -80,7 +80,7 @@ def get_transport_class(cls, label: str = None,) -> Type[EndpointServiceTransport]:
 
 
 class EndpointServiceClient(metaclass=EndpointServiceClientMeta):
-    """"""
+    """A service for managing Vertex AI's Endpoints."""
 
     @staticmethod
     def _get_default_mtls_endpoint(api_endpoint):
@@ -196,6 +196,26 @@ def parse_model_path(path: str) -> Dict[str, str]:
         )
         return m.groupdict() if m else {}
 
+    @staticmethod
+    def model_deployment_monitoring_job_path(
+        project: str, location: str, model_deployment_monitoring_job: str,
+    ) -> str:
+        """Returns a fully-qualified model_deployment_monitoring_job string."""
+        return "projects/{project}/locations/{location}/modelDeploymentMonitoringJobs/{model_deployment_monitoring_job}".format(
+            project=project,
+            location=location,
+            model_deployment_monitoring_job=model_deployment_monitoring_job,
+        )
+
+    @staticmethod
+    def parse_model_deployment_monitoring_job_path(path: str) -> Dict[str, str]:
+        """Parses a model_deployment_monitoring_job path into its component segments."""
+        m = re.match(
+            r"^projects/(?P<project>.+?)/locations/(?P<location>.+?)/modelDeploymentMonitoringJobs/(?P<model_deployment_monitoring_job>.+?)$",
+            path,
+        )
+        return m.groupdict() if m else {}
+
     @staticmethod
     def network_path(project: str, network: str,) -> str:
         """Returns a fully-qualified network string."""
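These helpers are pure string utilities, so they can be exercised without credentials; the values below are placeholders:

```python
from google.cloud.aiplatform_v1beta1.services.endpoint_service import (
    EndpointServiceClient,
)

# Build a fully-qualified resource name from its segments.
path = EndpointServiceClient.model_deployment_monitoring_job_path(
    "my-project", "us-central1", "456"
)
# "projects/my-project/locations/us-central1/modelDeploymentMonitoringJobs/456"

# And recover the segments from a name.
segments = EndpointServiceClient.parse_model_deployment_monitoring_job_path(path)
# {"project": "my-project", "location": "us-central1",
#  "model_deployment_monitoring_job": "456"}
```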
google/cloud/aiplatform_v1beta1/services/endpoint_service/transports/grpc.py
@@ -35,6 +35,8 @@
 class EndpointServiceGrpcTransport(EndpointServiceTransport):
     """gRPC backend transport for EndpointService.
 
+    A service for managing Vertex AI's Endpoints.
+
     This class defines the same methods as the primary client, so the
     primary client can load the underlying transport implementation
     and call it.
google/cloud/aiplatform_v1beta1/services/endpoint_service/transports/grpc_asyncio.py
@@ -37,6 +37,8 @@
 class EndpointServiceGrpcAsyncIOTransport(EndpointServiceTransport):
     """gRPC AsyncIO backend transport for EndpointService.
 
+    A service for managing Vertex AI's Endpoints.
+
     This class defines the same methods as the primary client, so the
     primary client can load the underlying transport implementation
     and call it.
google/cloud/aiplatform_v1beta1/services/metadata_service/async_client.py
@@ -468,7 +468,8 @@ async def delete_metadata_store(
         timeout: float = None,
         metadata: Sequence[Tuple[str, str]] = (),
     ) -> operation_async.AsyncOperation:
-        r"""Deletes a single MetadataStore.
+        r"""Deletes a single MetadataStore and all its child
+        resources (Artifacts, Executions, and Contexts).
 
         Args:
             request (:class:`google.cloud.aiplatform_v1beta1.types.DeleteMetadataStoreRequest`):
@@ -830,6 +831,8 @@ async def update_artifact(
             update_mask (:class:`google.protobuf.field_mask_pb2.FieldMask`):
                 Required. A FieldMask indicating
                 which fields should be updated.
+                Functionality of this field is not yet
+                supported.
 
                 This corresponds to the ``update_mask`` field
                 on the ``request`` instance; if ``request`` is provided, this
@@ -1339,6 +1342,8 @@ async def update_context(
             update_mask (:class:`google.protobuf.field_mask_pb2.FieldMask`):
                 Required. A FieldMask indicating
                 which fields should be updated.
+                Functionality of this field is not yet
+                supported.
 
                 This corresponds to the ``update_mask`` field
                 on the ``request`` instance; if ``request`` is provided, this
@@ -2117,6 +2122,8 @@ async def update_execution(
             update_mask (:class:`google.protobuf.field_mask_pb2.FieldMask`):
                 Required. A FieldMask indicating
                 which fields should be updated.
+                Functionality of this field is not yet
+                supported.
 
                 This corresponds to the ``update_mask`` field
                 on the ``request`` instance; if ``request`` is provided, this
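The docstrings now warn that the update_mask is accepted but not yet honored; the call shape itself is unchanged. A hedged sketch with a placeholder artifact name:

```python
from google.protobuf import field_mask_pb2
from google.cloud import aiplatform_v1beta1

client = aiplatform_v1beta1.MetadataServiceClient()

artifact = aiplatform_v1beta1.Artifact(
    name=(  # placeholder resource name
        "projects/my-project/locations/us-central1/metadataStores/default"
        "/artifacts/my-artifact"
    ),
    description="updated description",
)
# Per the docstring above, this mask is required but not yet enforced
# server-side.
mask = field_mask_pb2.FieldMask(paths=["description"])
updated = client.update_artifact(artifact=artifact, update_mask=mask)
```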
google/cloud/aiplatform_v1beta1/services/metadata_service/client.py
@@ -729,7 +729,8 @@ def delete_metadata_store(
         timeout: float = None,
         metadata: Sequence[Tuple[str, str]] = (),
     ) -> gac_operation.Operation:
-        r"""Deletes a single MetadataStore.
+        r"""Deletes a single MetadataStore and all its child
+        resources (Artifacts, Executions, and Contexts).
 
         Args:
             request (google.cloud.aiplatform_v1beta1.types.DeleteMetadataStoreRequest):
@@ -1091,6 +1092,8 @@ def update_artifact(
             update_mask (google.protobuf.field_mask_pb2.FieldMask):
                 Required. A FieldMask indicating
                 which fields should be updated.
+                Functionality of this field is not yet
+                supported.
 
                 This corresponds to the ``update_mask`` field
                 on the ``request`` instance; if ``request`` is provided, this
@@ -1600,6 +1603,8 @@ def update_context(
             update_mask (google.protobuf.field_mask_pb2.FieldMask):
                 Required. A FieldMask indicating
                 which fields should be updated.
+                Functionality of this field is not yet
+                supported.
 
                 This corresponds to the ``update_mask`` field
                 on the ``request`` instance; if ``request`` is provided, this
@@ -2384,6 +2389,8 @@ def update_execution(
             update_mask (google.protobuf.field_mask_pb2.FieldMask):
                 Required. A FieldMask indicating
                 which fields should be updated.
+                Functionality of this field is not yet
+                supported.
 
                 This corresponds to the ``update_mask`` field
                 on the ``request`` instance; if ``request`` is provided, this
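Since delete_metadata_store now removes the store's child Artifacts, Executions, and Contexts as well, the long-running operation it returns is worth waiting on. A sketch with a placeholder store name:

```python
from google.cloud import aiplatform_v1beta1

client = aiplatform_v1beta1.MetadataServiceClient()

operation = client.delete_metadata_store(
    name="projects/my-project/locations/us-central1/metadataStores/default"
)
# Blocks until the store and all of its child resources are gone.
operation.result()
```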
google/cloud/aiplatform_v1beta1/services/metadata_service/transports/grpc.py
@@ -346,7 +346,8 @@ def delete_metadata_store(
     ]:
         r"""Return a callable for the delete metadata store method over gRPC.
 
-        Deletes a single MetadataStore.
+        Deletes a single MetadataStore and all its child
+        resources (Artifacts, Executions, and Contexts).
 
         Returns:
             Callable[[~.DeleteMetadataStoreRequest],
google/cloud/aiplatform_v1beta1/services/metadata_service/transports/grpc_asyncio.py
@@ -354,7 +354,8 @@ def delete_metadata_store(
     ]:
         r"""Return a callable for the delete metadata store method over gRPC.
 
-        Deletes a single MetadataStore.
+        Deletes a single MetadataStore and all its child
+        resources (Artifacts, Executions, and Contexts).
 
         Returns:
             Callable[[~.DeleteMetadataStoreRequest],
google/cloud/aiplatform_v1beta1/services/migration_service/client.py
@@ -179,16 +179,19 @@ def parse_annotated_dataset_path(path: str) -> Dict[str, str]:
         return m.groupdict() if m else {}
 
     @staticmethod
-    def dataset_path(project: str, dataset: str,) -> str:
+    def dataset_path(project: str, location: str, dataset: str,) -> str:
         """Returns a fully-qualified dataset string."""
-        return "projects/{project}/datasets/{dataset}".format(
-            project=project, dataset=dataset,
+        return "projects/{project}/locations/{location}/datasets/{dataset}".format(
+            project=project, location=location, dataset=dataset,
         )
 
     @staticmethod
     def parse_dataset_path(path: str) -> Dict[str, str]:
         """Parses a dataset path into its component segments."""
-        m = re.match(r"^projects/(?P<project>.+?)/datasets/(?P<dataset>.+?)$", path)
+        m = re.match(
+            r"^projects/(?P<project>.+?)/locations/(?P<location>.+?)/datasets/(?P<dataset>.+?)$",
+            path,
+        )
         return m.groupdict() if m else {}
 
     @staticmethod
@@ -208,19 +211,16 @@ def parse_dataset_path(path: str) -> Dict[str, str]:
         return m.groupdict() if m else {}
 
     @staticmethod
-    def dataset_path(project: str, location: str, dataset: str,) -> str:
+    def dataset_path(project: str, dataset: str,) -> str:
         """Returns a fully-qualified dataset string."""
-        return "projects/{project}/locations/{location}/datasets/{dataset}".format(
-            project=project, location=location, dataset=dataset,
+        return "projects/{project}/datasets/{dataset}".format(
+            project=project, dataset=dataset,
         )
 
     @staticmethod
     def parse_dataset_path(path: str) -> Dict[str, str]:
         """Parses a dataset path into its component segments."""
-        m = re.match(
-            r"^projects/(?P<project>.+?)/locations/(?P<location>.+?)/datasets/(?P<dataset>.+?)$",
-            path,
-        )
+        m = re.match(r"^projects/(?P<project>.+?)/datasets/(?P<dataset>.+?)$", path)
         return m.groupdict() if m else {}
 
     @staticmethod
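Note that the two hunks above swap templates between two dataset_path definitions in the same class body (assuming, as the surrounding annotated-dataset helpers suggest, that this is MigrationServiceClient). Since Python binds the last definition in a class body, the effective helper after this change is the location-less form; a quick illustration under that reading:

```python
from google.cloud.aiplatform_v1beta1.services.migration_service import (
    MigrationServiceClient,
)

# The second definition shadows the first, so this resolves to the
# location-less template.
print(MigrationServiceClient.dataset_path("my-project", "789"))
# projects/my-project/datasets/789
```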
