Merge branch 'master' into xaitf2
ji-yaqi committed Jul 7, 2021
2 parents aa37d3b + 95639ee commit da7ac56
Showing 35 changed files with 148 additions and 60 deletions.
2 changes: 1 addition & 1 deletion .github/.OwlBot.lock.yaml
@@ -1,3 +1,3 @@
docker:
image: gcr.io/repo-automation-bots/owlbot-python:latest
digest: sha256:b8c131c558606d3cea6e18f8e87befbd448c1482319b0db3c5d5388fa6ea72e3
digest: sha256:99d90d097e4a4710cc8658ee0b5b963f4426d0e424819787c3ac1405c9a26719
14 changes: 6 additions & 8 deletions CONTRIBUTING.rst
@@ -68,14 +68,12 @@ Using ``nox``
We use `nox <https://nox.readthedocs.io/en/latest/>`__ to instrument our tests.

- To test your changes, run unit tests with ``nox``::
$ nox -s unit

$ nox -s unit-3.8
$ ...
- To run a single unit test::

- Args to pytest can be passed through the nox command separated by a `--`. For
example, to run a single test::
$ nox -s unit-3.9 -- -k <name of test>

$ nox -s unit-3.8 -- -k <name of test>

.. note::

@@ -142,7 +140,7 @@ Running System Tests
- To run system tests, you can execute::

# Run all system tests
$ nox -s system-3.8
$ nox -s system

# Run a single system test
$ nox -s system-3.8 -- -k <name of test>
@@ -215,8 +213,8 @@ Supported versions can be found in our ``noxfile.py`` `config`_.
.. _config: https://github.com/googleapis/python-aiplatform/blob/master/noxfile.py


We also explicitly decided to support Python 3 beginning with version
3.6. Reasons for this include:
We also explicitly decided to support Python 3 beginning with version 3.6.
Reasons for this include:

- Encouraging use of newest versions of Python 3
- Taking the lead of `prominent`_ open-source `projects`_
11 changes: 6 additions & 5 deletions google/cloud/aiplatform/base.py
@@ -819,8 +819,9 @@ def _sync_object_with_future_result(
if value:
setattr(self, attribute, value)

@classmethod
def _construct_sdk_resource_from_gapic(
self,
cls,
gapic_resource: proto.Message,
project: Optional[str] = None,
location: Optional[str] = None,
@@ -846,7 +847,7 @@ def _construct_sdk_resource_from_gapic(
VertexAiResourceNoun:
An initialized SDK object that represents GAPIC type.
"""
sdk_resource = self._empty_constructor(
sdk_resource = cls._empty_constructor(
project=project, location=location, credentials=credentials
)
sdk_resource._gca_resource = gapic_resource
@@ -894,14 +895,14 @@ def _list(
Returns:
List[VertexAiResourceNoun] - A list of SDK resource objects
"""
self = cls._empty_constructor(
resource = cls._empty_constructor(
project=project, location=location, credentials=credentials
)

# Fetch credentials once and re-use for all `_empty_constructor()` calls
creds = initializer.global_config.credentials

resource_list_method = getattr(self.api_client, self._list_method)
resource_list_method = getattr(resource.api_client, resource._list_method)

list_request = {
"parent": initializer.global_config.common_location_path(
@@ -916,7 +917,7 @@
resource_list = resource_list_method(request=list_request) or []

return [
self._construct_sdk_resource_from_gapic(
cls._construct_sdk_resource_from_gapic(
gapic_resource, project=project, location=location, credentials=creds
)
for gapic_resource in resource_list
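
Aside: the `base.py` hunks above turn `_construct_sdk_resource_from_gapic` into a classmethod and stop rebinding `self` inside `_list`. A minimal sketch of that pattern, using hypothetical class and resource names rather than the actual SDK types:

    # Minimal sketch of the classmethod-construction pattern used above; the
    # class and the dict "resources" are hypothetical stand-ins, not SDK code.
    from typing import List, Optional


    class Wrapper:
        def __init__(self, gca_resource: Optional[dict] = None):
            self._gca_resource = gca_resource

        @classmethod
        def _empty_constructor(cls) -> "Wrapper":
            # Build a bare instance without any remote calls.
            return cls()

        @classmethod
        def _construct_sdk_resource_from_gapic(cls, gapic_resource: dict) -> "Wrapper":
            # As a classmethod this no longer needs an existing instance, which
            # is what lets `_list` call it via `cls` directly.
            obj = cls._empty_constructor()
            obj._gca_resource = gapic_resource
            return obj

        @classmethod
        def _list(cls, raw_resources: List[dict]) -> List["Wrapper"]:
            # A plain local replaces the old `self = cls._empty_constructor()`
            # rebinding, so `self` is never shadowed inside the method.
            return [cls._construct_sdk_resource_from_gapic(r) for r in raw_resources]


    wrappers = Wrapper._list([{"name": "projects/p/locations/us-central1/things/1"}])
    print(wrappers[0]._gca_resource["name"])

Keeping the constructor a classmethod also lets subclasses override it, which is exactly what `Endpoint` does in the next file.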
44 changes: 42 additions & 2 deletions google/cloud/aiplatform/models.py
@@ -116,9 +116,9 @@ def __init__(
resource_name=endpoint_name,
)
self._gca_resource = self._get_gca_resource(resource_name=endpoint_name)

self._prediction_client = self._instantiate_prediction_client(
location=location or initializer.global_config.location,
credentials=credentials,
location=self.location, credentials=credentials,
)

@property
@@ -324,6 +324,46 @@ def _create(
credentials=credentials,
)

@classmethod
def _construct_sdk_resource_from_gapic(
cls,
gapic_resource: proto.Message,
project: Optional[str] = None,
location: Optional[str] = None,
credentials: Optional[auth_credentials.Credentials] = None,
) -> "Endpoint":
"""Given a GAPIC Endpoint object, return the SDK representation.
Args:
gapic_resource (proto.Message):
A GAPIC representation of a Endpoint resource, usually
retrieved by a get_* or in a list_* API call.
project (str):
Optional. Project to construct Endpoint object from. If not set,
project set in aiplatform.init will be used.
location (str):
Optional. Location to construct Endpoint object from. If not set,
location set in aiplatform.init will be used.
credentials (auth_credentials.Credentials):
Optional. Custom credentials to use to construct Endpoint.
Overrides credentials set in aiplatform.init.
Returns:
Endpoint:
An initialized Endpoint resource.
"""
endpoint = cls._empty_constructor(
project=project, location=location, credentials=credentials
)

endpoint._gca_resource = gapic_resource

endpoint._prediction_client = cls._instantiate_prediction_client(
location=endpoint.location, credentials=credentials,
)

return endpoint

@staticmethod
def _allocate_traffic(
traffic_split: Dict[str, int], traffic_percentage: int,
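
Aside: overriding `_construct_sdk_resource_from_gapic` on `Endpoint` matters because the base implementation would leave `_prediction_client` unset. A hedged usage sketch, assuming an initialized project with at least one deployed endpoint; the instance payload is a placeholder whose real shape depends on the model's input schema:

    from google.cloud import aiplatform

    aiplatform.init(project="my-project", location="us-central1")

    # Endpoints returned here are built through the override above, so each one
    # already carries a prediction client bound to its own location.
    endpoints = aiplatform.Endpoint.list()
    if endpoints:
        # Placeholder instance; replace with the deployed model's expected input.
        response = endpoints[0].predict(instances=[{"feature": 1.0}])
        print(response.predictions)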
@@ -34,6 +34,7 @@ def teardown(teardown_batch_prediction_job):
yield


@pytest.mark.skip(reason="https://github.com/googleapis/java-aiplatform/issues/420")
# Creating AutoML Tabular Forecasting Classification batch prediction job
def test_create_batch_prediction_job_tabular_forecasting_sample(capsys, shared_state):
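
Aside: every sample test touched below is disabled with the same decorator. `@pytest.mark.skip(reason=...)` makes pytest collect the test but report it as skipped with that reason instead of executing it; a minimal, hypothetical illustration (not a test from this repo):

    import pytest


    @pytest.mark.skip(reason="https://github.com/googleapis/java-aiplatform/issues/420")
    def test_disabled_sample():
        # Never executed; pytest reports it as "skipped" with the reason above.
        assert False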

@@ -35,6 +35,7 @@ def teardown(teardown_batch_prediction_job):
yield


@pytest.mark.skip(reason="https://github.com/googleapis/java-aiplatform/issues/420")
# Creating AutoML Text Classification batch prediction job
def test_ucaip_generated_create_batch_prediction_tcn_sample(capsys, shared_state):

@@ -35,6 +35,7 @@ def teardown(teardown_batch_prediction_job):
yield


@pytest.mark.skip(reason="https://github.com/googleapis/java-aiplatform/issues/420")
# Creating AutoML Text Entity Extraction batch prediction job
def test_ucaip_generated_create_batch_prediction_ten_sample(capsys, shared_state):

@@ -35,6 +35,7 @@ def teardown(teardown_batch_prediction_job):
yield


@pytest.mark.skip(reason="https://github.com/googleapis/java-aiplatform/issues/420")
# Creating AutoML Text Sentiment Analysis batch prediction job
def test_ucaip_generated_create_batch_prediction_tsn_sample(capsys, shared_state):

@@ -35,6 +35,7 @@ def teardown(teardown_batch_prediction_job):
yield


@pytest.mark.skip(reason="https://github.com/googleapis/java-aiplatform/issues/420")
# Creating AutoML Video Object Tracking batch prediction job
def test_create_batch_prediction_job_video_action_recognition_sample(
capsys, shared_state, job_client
@@ -35,6 +35,7 @@ def teardown(teardown_batch_prediction_job):
yield


@pytest.mark.skip(reason="https://github.com/googleapis/java-aiplatform/issues/420")
# Creating AutoML Video Classification batch prediction job
def test_ucaip_generated_create_batch_prediction_vcn_sample(capsys, shared_state):

@@ -35,6 +35,7 @@ def teardown(teardown_batch_prediction_job):
yield


@pytest.mark.skip(reason="https://github.com/googleapis/java-aiplatform/issues/420")
# Creating AutoML Video Object Tracking batch prediction job
def test_ucaip_generated_create_batch_prediction_vcn_sample(capsys, shared_state):

1 change: 1 addition & 0 deletions samples/snippets/create_dataset_image_sample_test.py
@@ -28,6 +28,7 @@ def teardown(teardown_dataset):
yield


@pytest.mark.skip(reason="https://github.com/googleapis/java-aiplatform/issues/420")
def test_ucaip_generated_create_dataset_image(capsys, shared_state):
create_dataset_image_sample.create_dataset_image_sample(
display_name=f"temp_create_dataset_image_test_{uuid4()}", project=PROJECT_ID
1 change: 1 addition & 0 deletions samples/snippets/create_dataset_tabular_gcs_sample_test.py
@@ -29,6 +29,7 @@ def teardown(teardown_dataset):
yield


@pytest.mark.skip(reason="https://github.com/googleapis/java-aiplatform/issues/420")
def test_ucaip_generated_create_dataset_tabular_gcs(capsys, shared_state):
create_dataset_tabular_gcs_sample.create_dataset_tabular_gcs_sample(
display_name=f"temp_create_dataset_test_{uuid4()}",
1 change: 1 addition & 0 deletions samples/snippets/create_dataset_video_sample_test.py
@@ -31,6 +31,7 @@ def teardown(teardown_dataset):
yield


@pytest.mark.skip(reason="https://github.com/googleapis/java-aiplatform/issues/420")
def test_ucaip_generated_create_dataset_video_sample_vision(capsys, shared_state):
create_dataset_video_sample.create_dataset_video_sample(
display_name=f"temp_create_dataset_test_{uuid4()}", project=PROJECT_ID
@@ -35,6 +35,7 @@ def teardown(teardown_hyperparameter_tuning_job):
yield


@pytest.mark.skip(reason="https://github.com/googleapis/java-aiplatform/issues/420")
def test_create_hyperparameter_tuning_job_python_package_sample(capsys, shared_state):

create_hyperparameter_tuning_job_python_package_sample.create_hyperparameter_tuning_job_python_package_sample(
@@ -29,6 +29,7 @@ def teardown(teardown_training_pipeline):
yield


@pytest.mark.skip(reason="https://github.com/googleapis/java-aiplatform/issues/420")
def test_ucaip_generated_create_training_pipeline_custom_job_sample(
capsys, shared_state
):
@@ -42,6 +42,7 @@ def teardown(teardown_training_pipeline):
yield


@pytest.mark.skip(reason="https://github.com/googleapis/java-aiplatform/issues/420")
def test_create_training_pipeline_custom_training_managed_dataset_sample(
capsys, shared_state, pipeline_client
):
@@ -30,6 +30,7 @@ def teardown(teardown_training_pipeline):
yield


@pytest.mark.skip(reason="https://github.com/googleapis/java-aiplatform/issues/420")
def test_ucaip_generated_create_training_pipeline_video_classification_sample(
capsys, shared_state
):
@@ -30,6 +30,7 @@ def teardown(teardown_training_pipeline):
yield


@pytest.mark.skip(reason="https://github.com/googleapis/java-aiplatform/issues/420")
def test_ucaip_generated_create_training_pipeline_image_object_dectection(
capsys, shared_state
):
@@ -32,6 +32,7 @@ def teardown(teardown_training_pipeline):
yield


@pytest.mark.skip(reason="https://github.com/googleapis/java-aiplatform/issues/420")
def test_ucaip_generated_create_training_pipeline_sample(capsys, shared_state):

create_training_pipeline_tabular_classification_sample.create_training_pipeline_tabular_classification_sample(
@@ -64,6 +64,7 @@ def teardown(shared_state):
)


@pytest.mark.skip(reason="https://github.com/googleapis/java-aiplatform/issues/420")
def test_ucaip_generated_create_training_pipeline_sample(capsys, shared_state):

create_training_pipeline_tabular_forecasting_sample.create_training_pipeline_tabular_forecasting_sample(
@@ -30,6 +30,7 @@ def teardown(teardown_training_pipeline):
yield


@pytest.mark.skip(reason="https://github.com/googleapis/java-aiplatform/issues/420")
# Training Text Sentiment Analysis Model
def test_ucaip_generated_create_training_pipeline_text_sentiment_analysis_sample(
capsys, shared_state
@@ -37,6 +37,7 @@ def teardown(teardown_training_pipeline):
yield


@pytest.mark.skip(reason="https://github.com/googleapis/java-aiplatform/issues/420")
# Training AutoML Vision Model
def test_create_training_pipeline_video_action_recognition_sample(capsys, shared_state):
create_training_pipeline_video_action_recognition_sample.create_training_pipeline_video_action_recognition_sample(
@@ -30,6 +30,7 @@ def teardown(teardown_training_pipeline):
yield


@pytest.mark.skip(reason="https://github.com/googleapis/java-aiplatform/issues/420")
# Training AutoML Vision Model
def test_ucaip_generated_create_training_pipeline_video_classification_sample(
capsys, shared_state
@@ -38,6 +38,7 @@ def teardown(teardown_endpoint):
yield


@pytest.mark.skip(reason="https://github.com/googleapis/java-aiplatform/issues/420")
def test_ucaip_generated_deploy_model_custom_trained_model_sample(capsys, shared_state):

assert shared_state["endpoint_name"] is not None
@@ -35,6 +35,7 @@ def teardown(storage_client):
blob.delete()


@pytest.mark.skip(reason="https://github.com/googleapis/java-aiplatform/issues/420")
def test_ucaip_generated_export_model_tabular_classification_sample(capsys):
export_model_tabular_classification_sample.export_model_tabular_classification_sample(
project=PROJECT_ID,
@@ -38,6 +38,7 @@ def teardown(storage_client):
blob.delete()


@pytest.mark.skip(reason="https://github.com/googleapis/java-aiplatform/issues/420")
def test_export_model_video_action_recognition_sample(capsys):
export_model_video_action_recognition_sample.export_model_video_action_recognition_sample(
project=PROJECT_ID,
@@ -24,6 +24,7 @@ def teardown(teardown_dataset):
yield


@pytest.mark.skip(reason="https://github.com/googleapis/java-aiplatform/issues/420")
def test_ucaip_generated_import_data_text_entity_extraction_sample(
capsys, shared_state
):
@@ -38,6 +38,7 @@ def teardown(teardown_dataset):
yield


@pytest.mark.skip(reason="https://github.com/googleapis/java-aiplatform/issues/420")
def test_import_data_video_action_recognition_sample(
capsys, shared_state, dataset_client
):
@@ -38,6 +38,7 @@ def teardown(teardown_dataset):
yield


@pytest.mark.skip(reason="https://github.com/googleapis/java-aiplatform/issues/420")
def test_ucaip_generated_import_data_video_classification_sample_single_label_image(
capsys, shared_state
):
2 changes: 1 addition & 1 deletion samples/snippets/requirements.txt
@@ -1,3 +1,3 @@
pytest==6.2.4
google-cloud-storage>=1.26.0, <2.0.0dev
google-cloud-aiplatform==1.1.0
google-cloud-aiplatform==1.1.1
@@ -36,6 +36,7 @@ def teardown(teardown_model):
yield


@pytest.mark.skip(reason="https://github.com/googleapis/java-aiplatform/issues/420")
def test_ucaip_generated_upload_model_explain_image_managed_container_sample(capsys, shared_state):

upload_model_explain_image_managed_container_sample.upload_model_explain_image_managed_container_sample(
@@ -36,6 +36,7 @@ def teardown(teardown_model):
yield


@pytest.mark.skip(reason="https://github.com/googleapis/java-aiplatform/issues/420")
def test_ucaip_generated_upload_model_explain_tabular_managed_constainer_sample(capsys, shared_state):

upload_model_explain_tabular_managed_container_sample.upload_model_explain_tabular_managed_container_sample(
2 changes: 1 addition & 1 deletion setup.py
@@ -39,7 +39,7 @@
full_extra_require = list(
set(tensorboard_extra_require + metadata_extra_require + xai_extra_require)
)
testing_extra_require = full_extra_require + ["grpcio-testing ~= 1.34.0"]
testing_extra_require = full_extra_require + ["grpcio-testing"]


setuptools.setup(
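
Aside: the removed `~= 1.34.0` on `grpcio-testing` is a compatible-release pin, equivalent to `>= 1.34.0, == 1.34.*`; dropping it lets any release satisfy the requirement. A small sketch with the `packaging` library (illustration only, not part of this diff) showing what the old pin allowed:

    # Illustrates the compatible-release pin that the setup.py change removes.
    from packaging.specifiers import SpecifierSet
    from packaging.version import Version

    old_pin = SpecifierSet("~= 1.34.0")
    print(Version("1.34.1") in old_pin)  # True:  patch releases satisfied the pin
    print(Version("1.38.0") in old_pin)  # False: newer minors were excluded, hence the unpin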
