diff --git a/README.rst b/README.rst index 3db9fc5022..209b577ead 100644 --- a/README.rst +++ b/README.rst @@ -4,13 +4,7 @@ Python Client for Cloud AI Platform |beta| |pypi| |versions| -:Warning: This library is a pre-release product and is subject to breaking changes. - -`Cloud AI Platform`_: Cloud AI Platform is a suite of machine learning tools that enables - developers to train high-quality models specific to their business needs. - It offers both novices and experts the best workbench for machine learning - development by leveraging Google's state-of-the-art transfer learning and - Neural Architecture Search technology. +`Cloud AI Platform`_: Google Cloud AI Platform is an integrated suite of machine learning tools and services for building and using ML models with AutoML or custom code. It offers both novices and experts the best workbench for the entire machine learning development lifecycle. - `Client Library Documentation`_ - `Product Documentation`_ @@ -21,9 +15,9 @@ Python Client for Cloud AI Platform :target: https://pypi.org/project/google-cloud-aiplatform/ .. |versions| image:: https://img.shields.io/pypi/pyversions/google-cloud-aiplatform.svg :target: https://pypi.org/project/google-cloud-aiplatform/ -.. _Cloud AI Platform: https://cloud.google.com/ai-platform/docs +.. _Cloud AI Platform: https://cloud.google.com/ai-platform-unified/docs .. _Client Library Documentation: https://googleapis.dev/python/aiplatform/latest -.. _Product Documentation: https://cloud.google.com/ai-platform/docs +.. _Product Documentation: https://cloud.google.com/ai-platform-unified/docs Quick Start ----------- @@ -85,5 +79,5 @@ Next Steps - View this `README`_ to see the full list of Cloud APIs that we cover. -.. _Cloud AI Platform API Product documentation: https://cloud.google.com/ai-platform/docs +.. _Cloud AI Platform API Product documentation: https://cloud.google.com/ai-platform-unified/docs .. 
_README: https://github.com/googleapis/google-cloud-python/blob/master/README.rst \ No newline at end of file diff --git a/samples/snippets/create_batch_prediction_job_text_classification_sample.py b/samples/snippets/create_batch_prediction_job_text_classification_sample.py new file mode 100644 index 0000000000..a9a9ad6e67 --- /dev/null +++ b/samples/snippets/create_batch_prediction_job_text_classification_sample.py @@ -0,0 +1,55 @@ +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# [START aiplatform_create_batch_prediction_job_text_classification_sample] +from google.cloud import aiplatform +from google.protobuf.struct_pb2 import Value + + +def create_batch_prediction_job_text_classification_sample( + project: str, + display_name: str, + model: str, + gcs_source_uri: str, + gcs_destination_output_uri_prefix: str, + location: str = "us-central1", + api_endpoint: str = "us-central1-aiplatform.googleapis.com", +): + client_options = {"api_endpoint": api_endpoint} + # Initialize client that will be used to create and send requests. + # This client only needs to be created once, and can be reused for multiple requests. 
+ client = aiplatform.gapic.JobServiceClient(client_options=client_options) + + batch_prediction_job = { + "display_name": display_name, + # Format: 'projects/{project}/locations/{location}/models/{model_id}' + "model": model, + "model_parameters": Value(), + "input_config": { + "instances_format": "jsonl", + "gcs_source": {"uris": [gcs_source_uri]}, + }, + "output_config": { + "predictions_format": "jsonl", + "gcs_destination": {"output_uri_prefix": gcs_destination_output_uri_prefix}, + }, + } + parent = f"projects/{project}/locations/{location}" + response = client.create_batch_prediction_job( + parent=parent, batch_prediction_job=batch_prediction_job + ) + print("response:", response) + + +# [END aiplatform_create_batch_prediction_job_text_classification_sample] diff --git a/samples/snippets/create_batch_prediction_job_text_classification_sample_test.py b/samples/snippets/create_batch_prediction_job_text_classification_sample_test.py new file mode 100644 index 0000000000..86d73b5ac2 --- /dev/null +++ b/samples/snippets/create_batch_prediction_job_text_classification_sample_test.py @@ -0,0 +1,85 @@ +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +from uuid import uuid4 +import pytest +import os + +import helpers + +import create_batch_prediction_job_text_classification_sample +import cancel_batch_prediction_job_sample +import delete_batch_prediction_job_sample + +from google.cloud import aiplatform + +PROJECT_ID = os.getenv("BUILD_SPECIFIC_GCLOUD_PROJECT") +LOCATION = "us-central1" +MODEL_ID = "3863595899074641920" # Permanent restaurant rating model +DISPLAY_NAME = f"temp_create_batch_prediction_tcn_test_{uuid4()}" +GCS_SOURCE_URI = ( + "gs://ucaip-samples-test-output/inputs/batch_predict_TCN/tcn_inputs.jsonl" +) +GCS_OUTPUT_URI = "gs://ucaip-samples-test-output/" + + +@pytest.fixture(scope="function") +def shared_state(): + + shared_state = {} + + yield shared_state + + assert "/" in shared_state["batch_prediction_job_name"] + + batch_prediction_job = shared_state["batch_prediction_job_name"].split("/")[-1] + + # Stop the batch prediction job + cancel_batch_prediction_job_sample.cancel_batch_prediction_job_sample( + project=PROJECT_ID, batch_prediction_job_id=batch_prediction_job + ) + + job_client = aiplatform.gapic.JobServiceClient( + client_options={"api_endpoint": "us-central1-aiplatform.googleapis.com"} + ) + + # Waiting for batch prediction job to be in CANCELLED state + helpers.wait_for_job_state( + get_job_method=job_client.get_batch_prediction_job, + name=shared_state["batch_prediction_job_name"], + ) + + # Delete the batch prediction job + delete_batch_prediction_job_sample.delete_batch_prediction_job_sample( + project=PROJECT_ID, batch_prediction_job_id=batch_prediction_job + ) + + +# Creating AutoML Text Classification batch prediction job +def test_ucaip_generated_create_batch_prediction_tcn_sample(capsys, shared_state): + + model_name = f"projects/{PROJECT_ID}/locations/{LOCATION}/models/{MODEL_ID}" + + create_batch_prediction_job_text_classification_sample.create_batch_prediction_job_text_classification_sample( + project=PROJECT_ID, + display_name=DISPLAY_NAME, + model=model_name, + 
gcs_source_uri=GCS_SOURCE_URI, + gcs_destination_output_uri_prefix=GCS_OUTPUT_URI, + ) + + out, _ = capsys.readouterr() + + # Save resource name of the newly created batch prediction job + shared_state["batch_prediction_job_name"] = helpers.get_name(out) diff --git a/samples/snippets/create_batch_prediction_job_text_entity_extraction_sample.py b/samples/snippets/create_batch_prediction_job_text_entity_extraction_sample.py new file mode 100644 index 0000000000..ec950613a0 --- /dev/null +++ b/samples/snippets/create_batch_prediction_job_text_entity_extraction_sample.py @@ -0,0 +1,55 @@ +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# [START aiplatform_create_batch_prediction_job_text_entity_extraction_sample] +from google.cloud import aiplatform +from google.protobuf.struct_pb2 import Value + + +def create_batch_prediction_job_text_entity_extraction_sample( + project: str, + display_name: str, + model: str, + gcs_source_uri: str, + gcs_destination_output_uri_prefix: str, + location: str = "us-central1", + api_endpoint: str = "us-central1-aiplatform.googleapis.com", +): + client_options = {"api_endpoint": api_endpoint} + # Initialize client that will be used to create and send requests. + # This client only needs to be created once, and can be reused for multiple requests. 
+ client = aiplatform.gapic.JobServiceClient(client_options=client_options) + + batch_prediction_job = { + "display_name": display_name, + # Format: 'projects/{project}/locations/{location}/models/{model_id}' + "model": model, + "model_parameters": Value(), + "input_config": { + "instances_format": "jsonl", + "gcs_source": {"uris": [gcs_source_uri]}, + }, + "output_config": { + "predictions_format": "jsonl", + "gcs_destination": {"output_uri_prefix": gcs_destination_output_uri_prefix}, + }, + } + parent = f"projects/{project}/locations/{location}" + response = client.create_batch_prediction_job( + parent=parent, batch_prediction_job=batch_prediction_job + ) + print("response:", response) + + +# [END aiplatform_create_batch_prediction_job_text_entity_extraction_sample] diff --git a/samples/snippets/create_batch_prediction_job_text_entity_extraction_sample_test.py b/samples/snippets/create_batch_prediction_job_text_entity_extraction_sample_test.py new file mode 100644 index 0000000000..c86395d623 --- /dev/null +++ b/samples/snippets/create_batch_prediction_job_text_entity_extraction_sample_test.py @@ -0,0 +1,85 @@ +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +from uuid import uuid4 +import pytest +import os + +import helpers + +import create_batch_prediction_job_text_entity_extraction_sample +import cancel_batch_prediction_job_sample +import delete_batch_prediction_job_sample + +from google.cloud import aiplatform + +PROJECT_ID = os.getenv("BUILD_SPECIFIC_GCLOUD_PROJECT") +LOCATION = "us-central1" +MODEL_ID = "5216364637146054656" # Permanent medical entity NL model +DISPLAY_NAME = f"temp_create_batch_prediction_ten_test_{uuid4()}" +GCS_SOURCE_URI = ( + "gs://ucaip-samples-test-output/inputs/batch_predict_TEN/ten_inputs.jsonl" +) +GCS_OUTPUT_URI = "gs://ucaip-samples-test-output/" + + +@pytest.fixture(scope="function") +def shared_state(): + + shared_state = {} + + yield shared_state + + assert "/" in shared_state["batch_prediction_job_name"] + + batch_prediction_job = shared_state["batch_prediction_job_name"].split("/")[-1] + + # Stop the batch prediction job + cancel_batch_prediction_job_sample.cancel_batch_prediction_job_sample( + project=PROJECT_ID, batch_prediction_job_id=batch_prediction_job + ) + + job_client = aiplatform.gapic.JobServiceClient( + client_options={"api_endpoint": "us-central1-aiplatform.googleapis.com"} + ) + + # Waiting for batch prediction job to be in CANCELLED state + helpers.wait_for_job_state( + get_job_method=job_client.get_batch_prediction_job, + name=shared_state["batch_prediction_job_name"], + ) + + # Delete the batch prediction job + delete_batch_prediction_job_sample.delete_batch_prediction_job_sample( + project=PROJECT_ID, batch_prediction_job_id=batch_prediction_job + ) + + +# Creating AutoML Text Entity Extraction batch prediction job +def test_ucaip_generated_create_batch_prediction_ten_sample(capsys, shared_state): + + model_name = f"projects/{PROJECT_ID}/locations/{LOCATION}/models/{MODEL_ID}" + + create_batch_prediction_job_text_entity_extraction_sample.create_batch_prediction_job_text_entity_extraction_sample( + project=PROJECT_ID, + display_name=DISPLAY_NAME, + 
model=model_name, + gcs_source_uri=GCS_SOURCE_URI, + gcs_destination_output_uri_prefix=GCS_OUTPUT_URI, + ) + + out, _ = capsys.readouterr() + + # Save resource name of the newly created batch prediction job + shared_state["batch_prediction_job_name"] = helpers.get_name(out) diff --git a/samples/snippets/create_batch_prediction_job_text_sentiment_analysis_sample.py b/samples/snippets/create_batch_prediction_job_text_sentiment_analysis_sample.py new file mode 100644 index 0000000000..22bd7a31c4 --- /dev/null +++ b/samples/snippets/create_batch_prediction_job_text_sentiment_analysis_sample.py @@ -0,0 +1,55 @@ +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# [START aiplatform_create_batch_prediction_job_text_sentiment_analysis_sample] +from google.cloud import aiplatform +from google.protobuf.struct_pb2 import Value + + +def create_batch_prediction_job_text_sentiment_analysis_sample( + project: str, + display_name: str, + model: str, + gcs_source_uri: str, + gcs_destination_output_uri_prefix: str, + location: str = "us-central1", + api_endpoint: str = "us-central1-aiplatform.googleapis.com", +): + client_options = {"api_endpoint": api_endpoint} + # Initialize client that will be used to create and send requests. + # This client only needs to be created once, and can be reused for multiple requests. 
+ client = aiplatform.gapic.JobServiceClient(client_options=client_options) + + batch_prediction_job = { + "display_name": display_name, + # Format: 'projects/{project}/locations/{location}/models/{model_id}' + "model": model, + "model_parameters": Value(), + "input_config": { + "instances_format": "jsonl", + "gcs_source": {"uris": [gcs_source_uri]}, + }, + "output_config": { + "predictions_format": "jsonl", + "gcs_destination": {"output_uri_prefix": gcs_destination_output_uri_prefix}, + }, + } + parent = f"projects/{project}/locations/{location}" + response = client.create_batch_prediction_job( + parent=parent, batch_prediction_job=batch_prediction_job + ) + print("response:", response) + + +# [END aiplatform_create_batch_prediction_job_text_sentiment_analysis_sample] diff --git a/samples/snippets/create_batch_prediction_job_text_sentiment_analysis_sample_test.py b/samples/snippets/create_batch_prediction_job_text_sentiment_analysis_sample_test.py new file mode 100644 index 0000000000..d3fc196707 --- /dev/null +++ b/samples/snippets/create_batch_prediction_job_text_sentiment_analysis_sample_test.py @@ -0,0 +1,85 @@ +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +from uuid import uuid4 +import pytest +import os + +import helpers + +import create_batch_prediction_job_text_sentiment_analysis_sample +import cancel_batch_prediction_job_sample +import delete_batch_prediction_job_sample + +from google.cloud import aiplatform + +PROJECT_ID = os.getenv("BUILD_SPECIFIC_GCLOUD_PROJECT") +LOCATION = "us-central1" +MODEL_ID = "4792568875336073216" # Permanent economic sentiment model +DISPLAY_NAME = f"temp_create_batch_prediction_tsn_test_{uuid4()}" +GCS_SOURCE_URI = ( + "gs://ucaip-samples-test-output/inputs/batch_predict_TSN/tsn_inputs.jsonl" +) +GCS_OUTPUT_URI = "gs://ucaip-samples-test-output/" + + +@pytest.fixture(scope="function") +def shared_state(): + + shared_state = {} + + yield shared_state + + assert "/" in shared_state["batch_prediction_job_name"] + + batch_prediction_job = shared_state["batch_prediction_job_name"].split("/")[-1] + + # Stop the batch prediction job + cancel_batch_prediction_job_sample.cancel_batch_prediction_job_sample( + project=PROJECT_ID, batch_prediction_job_id=batch_prediction_job + ) + + job_client = aiplatform.gapic.JobServiceClient( + client_options={"api_endpoint": "us-central1-aiplatform.googleapis.com"} + ) + + # Waiting for batch prediction job to be in CANCELLED state + helpers.wait_for_job_state( + get_job_method=job_client.get_batch_prediction_job, + name=shared_state["batch_prediction_job_name"], + ) + + # Delete the batch prediction job + delete_batch_prediction_job_sample.delete_batch_prediction_job_sample( + project=PROJECT_ID, batch_prediction_job_id=batch_prediction_job + ) + + +# Creating AutoML Text Sentiment Analysis batch prediction job +def test_ucaip_generated_create_batch_prediction_tsn_sample(capsys, shared_state): + + model_name = f"projects/{PROJECT_ID}/locations/{LOCATION}/models/{MODEL_ID}" + + create_batch_prediction_job_text_sentiment_analysis_sample.create_batch_prediction_job_text_sentiment_analysis_sample( + project=PROJECT_ID, + display_name=DISPLAY_NAME, + 
model=model_name, + gcs_source_uri=GCS_SOURCE_URI, + gcs_destination_output_uri_prefix=GCS_OUTPUT_URI, + ) + + out, _ = capsys.readouterr() + + # Save resource name of the newly created batch prediction job + shared_state["batch_prediction_job_name"] = helpers.get_name(out) diff --git a/samples/snippets/create_batch_prediction_job_video_action_recognition_test.py b/samples/snippets/create_batch_prediction_job_video_action_recognition_test.py index 7a3e07a082..d489cb421d 100644 --- a/samples/snippets/create_batch_prediction_job_video_action_recognition_test.py +++ b/samples/snippets/create_batch_prediction_job_video_action_recognition_test.py @@ -25,11 +25,14 @@ PROJECT_ID = os.getenv("BUILD_SPECIFIC_GCLOUD_PROJECT") LOCATION = "us-central1" MODEL_ID = "3530998029718913024" # permanent_swim_run_videos_action_recognition_model -DISPLAY_NAME = f"temp_create_batch_prediction_job_video_action_recognition_test_{uuid.uuid4()}" +DISPLAY_NAME = ( +    f"temp_create_batch_prediction_job_video_action_recognition_test_{uuid.uuid4()}" +) GCS_SOURCE_URI = "gs://automl-video-demo-data/ucaip-var/swimrun_bp.jsonl" GCS_OUTPUT_URI = "gs://ucaip-samples-test-output/" API_ENDPOINT = "us-central1-aiplatform.googleapis.com" + @pytest.fixture def shared_state(): state = {} @@ -39,14 +42,27 @@ def shared_state(): @pytest.fixture def job_client(): client_options = {"api_endpoint": API_ENDPOINT} - job_client = aiplatform.gapic.JobServiceClient( - client_options=client_options) + job_client = aiplatform.gapic.JobServiceClient(client_options=client_options) yield job_client @pytest.fixture(scope="function", autouse=True) def teardown(shared_state, job_client): yield + + # Stop the batch prediction job + # (it must reach a terminal state before it can be deleted below) + job_client.cancel_batch_prediction_job( + name=shared_state["batch_prediction_job_name"] + ) + + # Waiting for batch prediction job to be in CANCELLED state + helpers.wait_for_job_state( + get_job_method=job_client.get_batch_prediction_job, + 
name=shared_state["batch_prediction_job_name"], + ) + + # Delete the batch prediction job job_client.delete_batch_prediction_job( name=shared_state["batch_prediction_job_name"] ) @@ -57,12 +73,12 @@ def test_create_batch_prediction_job_video_action_recognition_sample( capsys, shared_state, job_client ): - model = f"projects/{PROJECT_ID}/locations/{LOCATION}/models/{MODEL_ID}" + model_name = f"projects/{PROJECT_ID}/locations/{LOCATION}/models/{MODEL_ID}" create_batch_prediction_job_video_action_recognition_sample.create_batch_prediction_job_video_action_recognition_sample( project=PROJECT_ID, display_name=DISPLAY_NAME, - model=model, + model=model_name, gcs_source_uri=GCS_SOURCE_URI, gcs_destination_output_uri_prefix=GCS_OUTPUT_URI, ) @@ -71,12 +87,3 @@ def test_create_batch_prediction_job_video_action_recognition_sample( # Save resource name of the newly created batch prediction job shared_state["batch_prediction_job_name"] = helpers.get_name(out) - - # Waiting for batch prediction job to be in CANCELLED state - helpers.wait_for_job_state( - get_job_method=job_client.get_batch_prediction_job, - name=shared_state["batch_prediction_job_name"], - expected_state="SUCCEEDED", - timeout=600, - freq=20, - ) diff --git a/samples/snippets/create_data_labeling_job_image_segmentation_test.py b/samples/snippets/create_data_labeling_job_image_segmentation_test.py index 79f40d949c..3e78657484 100644 --- a/samples/snippets/create_data_labeling_job_image_segmentation_test.py +++ b/samples/snippets/create_data_labeling_job_image_segmentation_test.py @@ -34,6 +34,7 @@ ANNOTATION_SPEC = {"color": {"red": 1.0}, "displayName": "rose"} ANNOTATION_SET_NAME = f"temp_image_segmentation_{uuid.uuid4()}" + @pytest.fixture def shared_state(): state = {} diff --git a/samples/snippets/create_endpoint_sample_test.py b/samples/snippets/create_endpoint_sample_test.py index 4f66945908..cca027cdfc 100644 --- a/samples/snippets/create_endpoint_sample_test.py +++ 
b/samples/snippets/create_endpoint_sample_test.py @@ -18,7 +18,8 @@ import helpers -import create_endpoint_sample, delete_endpoint_sample +import create_endpoint_sample +import delete_endpoint_sample DISPLAY_NAME = f"temp_create_endpoint_test_{uuid4()}" PROJECT = os.getenv("BUILD_SPECIFIC_GCLOUD_PROJECT") diff --git a/samples/snippets/create_hyperparameter_tuning_job_python_package_sample_test.py b/samples/snippets/create_hyperparameter_tuning_job_python_package_sample_test.py index 4a328f5170..6ec111bcac 100644 --- a/samples/snippets/create_hyperparameter_tuning_job_python_package_sample_test.py +++ b/samples/snippets/create_hyperparameter_tuning_job_python_package_sample_test.py @@ -34,6 +34,7 @@ PACKAGE_URI = "gs://ucaip-test-us-central1/training/pythonpackages/trainer.tar.bz2" PYTHON_MODULE = "trainer.hptuning_trainer" + @pytest.fixture def shared_state(): state = {} diff --git a/samples/snippets/create_training_pipeline_custom_job_sample.py b/samples/snippets/create_training_pipeline_custom_job_sample.py index b8918f5b09..f9e22bc867 100644 --- a/samples/snippets/create_training_pipeline_custom_job_sample.py +++ b/samples/snippets/create_training_pipeline_custom_job_sample.py @@ -63,7 +63,7 @@ def create_training_pipeline_custom_job_sample( "training_task_inputs": training_task_inputs, "model_to_upload": { "display_name": model_display_name, - "container_spec": {"image_uri": image_uri,}, + "container_spec": {"image_uri": image_uri, }, }, } parent = f"projects/{project}/locations/{location}" diff --git a/samples/snippets/create_training_pipeline_video_action_recognition_test.py b/samples/snippets/create_training_pipeline_video_action_recognition_test.py index b443746d67..ab653a49e1 100644 --- a/samples/snippets/create_training_pipeline_video_action_recognition_test.py +++ b/samples/snippets/create_training_pipeline_video_action_recognition_test.py @@ -24,12 +24,17 @@ LOCATION = "us-central1" PROJECT_ID = os.getenv("BUILD_SPECIFIC_GCLOUD_PROJECT") -DATASET_ID = 
"6881957627459272704" # permanent_swim_run_videos_action_recognition_dataset -DISPLAY_NAME = f"temp_create_training_pipeline_video_action_recognition_test_{uuid.uuid4()}" +DATASET_ID = ( + "6881957627459272704" # permanent_swim_run_videos_action_recognition_dataset +) +DISPLAY_NAME = ( + f"temp_create_training_pipeline_video_action_recognition_test_{uuid.uuid4()}" +) MODEL_DISPLAY_NAME = f"Temp Model for {DISPLAY_NAME}" MODEL_TYPE = "CLOUD" API_ENDPOINT = "us-central1-aiplatform.googleapis.com" + @pytest.fixture def shared_state(): state = {} @@ -44,27 +49,32 @@ def pipeline_client(): ) yield pipeline_client +@pytest.fixture(scope="function", autouse=True) +def teardown(shared_state, pipeline_client): + yield -@pytest.fixture -def model_client(): - client_options = {"api_endpoint": API_ENDPOINT} - model_client = aiplatform.gapic.ModelServiceClient( - client_options=client_options) - yield model_client + # Stop the training pipeline + pipeline_client.cancel_training_pipeline( + name=shared_state["training_pipeline_name"] + ) + # Waiting for training pipeline to be in CANCELLED state + helpers.wait_for_job_state( + get_job_method=pipeline_client.get_training_pipeline, + name=shared_state["training_pipeline_name"], + ) -@pytest.fixture(scope="function", autouse=True) -def teardown(shared_state, model_client, pipeline_client): - yield - model_client.delete_model(name=shared_state["model_name"]) + # Delete the training pipeline pipeline_client.delete_training_pipeline( name=shared_state["training_pipeline_name"] ) + + # Training AutoML Vision Model def test_create_training_pipeline_video_action_recognition_sample( - capsys, shared_state, pipeline_client + capsys, shared_state ): create_training_pipeline_video_action_recognition_sample.create_training_pipeline_video_action_recognition_sample( project=PROJECT_ID, @@ -75,26 +85,7 @@ def test_create_training_pipeline_video_action_recognition_sample( ) out, _ = capsys.readouterr() - assert "response:" in out # Save resource 
name of the newly created training pipeline shared_state["training_pipeline_name"] = helpers.get_name(out) - - # Poll until the pipeline succeeds because we want to test the model_upload step as well. - helpers.wait_for_job_state( - get_job_method=pipeline_client.get_training_pipeline, - name=shared_state["training_pipeline_name"], - expected_state="SUCCEEDED", - timeout=5000, - freq=20, - ) - - training_pipeline = pipeline_client.get_training_pipeline( - name=shared_state["training_pipeline_name"] - ) - - # Check that the model indeed has been uploaded. - assert training_pipeline.model_to_upload.name != "" - - shared_state["model_name"] = training_pipeline.model_to_upload.name diff --git a/samples/snippets/deploy_model_sample_test.py b/samples/snippets/deploy_model_sample_test.py index 2960a7f3d3..46f8c03f2e 100644 --- a/samples/snippets/deploy_model_sample_test.py +++ b/samples/snippets/deploy_model_sample_test.py @@ -13,7 +13,8 @@ # limitations under the License. from google.cloud import aiplatform -import deploy_model_sample, delete_endpoint_sample +import deploy_model_sample +import delete_endpoint_sample from uuid import uuid4 import pytest diff --git a/samples/snippets/export_model_video_action_recognition_test.py b/samples/snippets/export_model_video_action_recognition_test.py index 543be7dc47..fabb228147 100644 --- a/samples/snippets/export_model_video_action_recognition_test.py +++ b/samples/snippets/export_model_video_action_recognition_test.py @@ -19,10 +19,15 @@ from google.cloud import storage PROJECT_ID = os.getenv("BUILD_SPECIFIC_GCLOUD_PROJECT") -MODEL_ID = "3422489426196955136" # permanent_swim_run_videos_action_recognition_edge_model -GCS_URI = "gs://ucaip-samples-test-output/tmp/export_model_video_action_recognition_sample" +MODEL_ID = ( + "3422489426196955136" # permanent_swim_run_videos_action_recognition_edge_model +) +GCS_URI = ( + "gs://ucaip-samples-test-output/tmp/export_model_video_action_recognition_sample" +) EXPORT_FORMAT = 
"tf-saved-model" + @pytest.fixture(scope="function", autouse=True) def teardown(): yield diff --git a/samples/snippets/import_data_text_classification_single_label_sample_test.py b/samples/snippets/import_data_text_classification_single_label_sample_test.py index 6b7dbdd195..1ea0afaab2 100644 --- a/samples/snippets/import_data_text_classification_single_label_sample_test.py +++ b/samples/snippets/import_data_text_classification_single_label_sample_test.py @@ -21,6 +21,8 @@ # Test to assert that the import data function was called. We assert that the function was called # rather than wait for this LRO to complete + + def test_ucaip_generated_import_data_text_classification_single_label_sample(): response = MagicMock() response.next_page_token = b"" diff --git a/samples/snippets/import_data_video_action_recognition_test.py b/samples/snippets/import_data_video_action_recognition_test.py index e62dc1f49f..cacb56de70 100644 --- a/samples/snippets/import_data_video_action_recognition_test.py +++ b/samples/snippets/import_data_video_action_recognition_test.py @@ -25,7 +25,9 @@ PROJECT_ID = os.getenv("BUILD_SPECIFIC_GCLOUD_PROJECT") LOCATION = "us-central1" GCS_SOURCE = "gs://automl-video-demo-data/ucaip-var/swimrun.jsonl" -METADATA_SCHEMA_URI = "gs://google-cloud-aiplatform/schema/dataset/metadata/video_1.0.0.yaml" +METADATA_SCHEMA_URI = ( + "gs://google-cloud-aiplatform/schema/dataset/metadata/video_1.0.0.yaml" +) API_ENDPOINT = "us-central1-aiplatform.googleapis.com" DISPLAY_NAME = f"temp_import_data_video_action_recognition_test_{uuid.uuid4()}" diff --git a/samples/snippets/upload_model_sample_test.py b/samples/snippets/upload_model_sample_test.py index 3814ddd4a7..7cc9635de6 100644 --- a/samples/snippets/upload_model_sample_test.py +++ b/samples/snippets/upload_model_sample_test.py @@ -18,7 +18,8 @@ import helpers -import upload_model_sample, delete_model_sample +import upload_model_sample +import delete_model_sample PROJECT_ID = 
os.getenv("BUILD_SPECIFIC_GCLOUD_PROJECT") IMAGE_URI = "gcr.io/cloud-ml-service-public/cloud-ml-online-prediction-model-server-cpu:v1_15py3cmle_op_images_20200229_0210_RC00"