From 11cf08a4550e6f506f84acffd3b253de379272fb Mon Sep 17 00:00:00 2001 From: Lo Ferris Date: Thu, 1 Jul 2021 17:18:19 +0000 Subject: [PATCH 01/15] first steps in adding sample --- samples/snippets/create_cluster.py | 10 ++-- .../instantiate_inline_workflow_template.py | 8 +-- samples/snippets/list_clusters.py | 6 +- samples/snippets/noxfile.py | 41 +++++++------- samples/snippets/submit_job.py | 24 ++++---- samples/snippets/submit_job_test.py | 42 +++++++------- samples/snippets/submit_job_to_cluster.py | 12 ++-- samples/snippets/update_cluster.py | 55 +++++++++++++++++++ samples/snippets/update_cluster_test.py | 0 9 files changed, 131 insertions(+), 67 deletions(-) create mode 100644 samples/snippets/update_cluster.py create mode 100644 samples/snippets/update_cluster_test.py diff --git a/samples/snippets/create_cluster.py b/samples/snippets/create_cluster.py index f4fee7d8..633b59e8 100644 --- a/samples/snippets/create_cluster.py +++ b/samples/snippets/create_cluster.py @@ -29,12 +29,12 @@ def create_cluster(project_id, region, cluster_name): """This sample walks a user through creating a Cloud Dataproc cluster - using the Python client library. + using the Python client library. - Args: - project_id (string): Project to use for creating resources. - region (string): Region where the resources should live. - cluster_name (string): Name to use for creating a cluster. + Args: + project_id (string): Project to use for creating resources. + region (string): Region where the resources should live. + cluster_name (string): Name to use for creating a cluster. """ # Create a client with the endpoint set to the desired cluster region. diff --git a/samples/snippets/instantiate_inline_workflow_template.py b/samples/snippets/instantiate_inline_workflow_template.py index b3a40d13..cbb1a218 100644 --- a/samples/snippets/instantiate_inline_workflow_template.py +++ b/samples/snippets/instantiate_inline_workflow_template.py @@ -27,11 +27,11 @@ def instantiate_inline_workflow_template(project_id, region): """This sample walks a user through submitting a workflow - for a Cloud Dataproc using the Python client library. + for a Cloud Dataproc using the Python client library. - Args: - project_id (string): Project to use for running the workflow. - region (string): Region where the workflow resources should live. + Args: + project_id (string): Project to use for running the workflow. + region (string): Region where the workflow resources should live. """ # Create a client with the endpoint set to the desired region. diff --git a/samples/snippets/list_clusters.py b/samples/snippets/list_clusters.py index f0e7bac3..837013dd 100644 --- a/samples/snippets/list_clusters.py +++ b/samples/snippets/list_clusters.py @@ -49,8 +49,10 @@ def main(project_id, region): else: # Use a regional gRPC endpoint. See: # https://cloud.google.com/dataproc/docs/concepts/regional-endpoints - client_transport = cluster_controller_grpc_transport.ClusterControllerGrpcTransport( - address="{}-dataproc.googleapis.com:443".format(region) + client_transport = ( + cluster_controller_grpc_transport.ClusterControllerGrpcTransport( + address="{}-dataproc.googleapis.com:443".format(region) + ) ) dataproc_cluster_client = dataproc_v1.ClusterControllerClient(client_transport) diff --git a/samples/snippets/noxfile.py b/samples/snippets/noxfile.py index 5ff9e1db..b3c8658a 100644 --- a/samples/snippets/noxfile.py +++ b/samples/snippets/noxfile.py @@ -38,17 +38,15 @@ TEST_CONFIG = { # You can opt out from the test for specific Python versions. 
- 'ignored_versions': ["2.7"], - + "ignored_versions": ["2.7"], # Old samples are opted out of enforcing Python type hints # All new samples should feature them - 'enforce_type_hints': False, - + "enforce_type_hints": False, # An envvar key for determining the project id to use. Change it # to 'BUILD_SPECIFIC_GCLOUD_PROJECT' if you want to opt in using a # build specific Cloud project. You can also use your own string # to use your own Cloud project. - 'gcloud_project_env': 'GOOGLE_CLOUD_PROJECT', + "gcloud_project_env": "GOOGLE_CLOUD_PROJECT", # 'gcloud_project_env': 'BUILD_SPECIFIC_GCLOUD_PROJECT', # If you need to use a specific version of pip, # change pip_version_override to the string representation @@ -56,13 +54,13 @@ "pip_version_override": None, # A dictionary you want to inject into your test. Don't put any # secrets here. These values will override predefined values. - 'envs': {}, + "envs": {}, } try: # Ensure we can import noxfile_config in the project's directory. - sys.path.append('.') + sys.path.append(".") from noxfile_config import TEST_CONFIG_OVERRIDE except ImportError as e: print("No user noxfile_config found: detail: {}".format(e)) @@ -77,12 +75,12 @@ def get_pytest_env_vars() -> Dict[str, str]: ret = {} # Override the GCLOUD_PROJECT and the alias. - env_key = TEST_CONFIG['gcloud_project_env'] + env_key = TEST_CONFIG["gcloud_project_env"] # This should error out if not set. - ret['GOOGLE_CLOUD_PROJECT'] = os.environ[env_key] + ret["GOOGLE_CLOUD_PROJECT"] = os.environ[env_key] # Apply user supplied envs. - ret.update(TEST_CONFIG['envs']) + ret.update(TEST_CONFIG["envs"]) return ret @@ -91,7 +89,7 @@ def get_pytest_env_vars() -> Dict[str, str]: ALL_VERSIONS = ["2.7", "3.6", "3.7", "3.8", "3.9"] # Any default versions that should be ignored. -IGNORED_VERSIONS = TEST_CONFIG['ignored_versions'] +IGNORED_VERSIONS = TEST_CONFIG["ignored_versions"] TESTED_VERSIONS = sorted([v for v in ALL_VERSIONS if v not in IGNORED_VERSIONS]) @@ -140,7 +138,7 @@ def _determine_local_import_names(start_dir: str) -> List[str]: @nox.session def lint(session: nox.sessions.Session) -> None: - if not TEST_CONFIG['enforce_type_hints']: + if not TEST_CONFIG["enforce_type_hints"]: session.install("flake8", "flake8-import-order") else: session.install("flake8", "flake8-import-order", "flake8-annotations") @@ -149,9 +147,11 @@ def lint(session: nox.sessions.Session) -> None: args = FLAKE8_COMMON_ARGS + [ "--application-import-names", ",".join(local_names), - "." + ".", ] session.run("flake8", *args) + + # # Black # @@ -164,6 +164,7 @@ def blacken(session: nox.sessions.Session) -> None: session.run("black", *python_files) + # # Sample Tests # @@ -172,7 +173,9 @@ def blacken(session: nox.sessions.Session) -> None: PYTEST_COMMON_ARGS = ["--junitxml=sponge_log.xml"] -def _session_tests(session: nox.sessions.Session, post_install: Callable = None) -> None: +def _session_tests( + session: nox.sessions.Session, post_install: Callable = None +) -> None: if TEST_CONFIG["pip_version_override"]: pip_version = TEST_CONFIG["pip_version_override"] session.install(f"pip=={pip_version}") @@ -202,7 +205,7 @@ def _session_tests(session: nox.sessions.Session, post_install: Callable = None) # on travis where slow and flaky tests are excluded. 
# See http://doc.pytest.org/en/latest/_modules/_pytest/main.html success_codes=[0, 5], - env=get_pytest_env_vars() + env=get_pytest_env_vars(), ) @@ -212,9 +215,9 @@ def py(session: nox.sessions.Session) -> None: if session.python in TESTED_VERSIONS: _session_tests(session) else: - session.skip("SKIPPED: {} tests are disabled for this sample.".format( - session.python - )) + session.skip( + "SKIPPED: {} tests are disabled for this sample.".format(session.python) + ) # @@ -223,7 +226,7 @@ def py(session: nox.sessions.Session) -> None: def _get_repo_root() -> Optional[str]: - """ Returns the root folder of the project. """ + """Returns the root folder of the project.""" # Get root of this repository. Assume we don't have directories nested deeper than 10 items. p = Path(os.getcwd()) for i in range(10): diff --git a/samples/snippets/submit_job.py b/samples/snippets/submit_job.py index b70348c3..d7761b73 100644 --- a/samples/snippets/submit_job.py +++ b/samples/snippets/submit_job.py @@ -23,8 +23,10 @@ # [START dataproc_submit_job] import re + # [END dataproc_submit_job] import sys + # [START dataproc_submit_job] from google.cloud import dataproc_v1 as dataproc @@ -33,21 +35,19 @@ def submit_job(project_id, region, cluster_name): # Create the job client. - job_client = dataproc.JobControllerClient(client_options={ - 'api_endpoint': '{}-dataproc.googleapis.com:443'.format(region) - }) + job_client = dataproc.JobControllerClient( + client_options={"api_endpoint": "{}-dataproc.googleapis.com:443".format(region)} + ) # Create the job config. 'main_jar_file_uri' can also be a # Google Cloud Storage URL. job = { - 'placement': { - 'cluster_name': cluster_name + "placement": {"cluster_name": cluster_name}, + "spark_job": { + "main_class": "org.apache.spark.examples.SparkPi", + "jar_file_uris": ["file:///usr/lib/spark/examples/jars/spark-examples.jar"], + "args": ["1000"], }, - 'spark_job': { - 'main_class': 'org.apache.spark.examples.SparkPi', - 'jar_file_uris': ['file:///usr/lib/spark/examples/jars/spark-examples.jar'], - 'args': ['1000'] - } } operation = job_client.submit_job_as_operation( @@ -67,12 +67,14 @@ def submit_job(project_id, region, cluster_name): ) print(f"Job finished successfully: {output}") + + # [END dataproc_submit_job] if __name__ == "__main__": if len(sys.argv) < 3: - sys.exit('python submit_job.py project_id region cluster_name') + sys.exit("python submit_job.py project_id region cluster_name") project_id = sys.argv[1] region = sys.argv[2] diff --git a/samples/snippets/submit_job_test.py b/samples/snippets/submit_job_test.py index 326b38d5..6827916f 100644 --- a/samples/snippets/submit_job_test.py +++ b/samples/snippets/submit_job_test.py @@ -21,30 +21,24 @@ import submit_job -PROJECT_ID = os.environ['GOOGLE_CLOUD_PROJECT'] -REGION = 'us-central1' -CLUSTER_NAME = 'py-sj-test-{}'.format(str(uuid.uuid4())) +PROJECT_ID = os.environ["GOOGLE_CLOUD_PROJECT"] +REGION = "us-central1" +CLUSTER_NAME = "py-sj-test-{}".format(str(uuid.uuid4())) CLUSTER = { - 'project_id': PROJECT_ID, - 'cluster_name': CLUSTER_NAME, - 'config': { - 'master_config': { - 'num_instances': 1, - 'machine_type_uri': 'n1-standard-2' - }, - 'worker_config': { - 'num_instances': 2, - 'machine_type_uri': 'n1-standard-2' - } - } + "project_id": PROJECT_ID, + "cluster_name": CLUSTER_NAME, + "config": { + "master_config": {"num_instances": 1, "machine_type_uri": "n1-standard-2"}, + "worker_config": {"num_instances": 2, "machine_type_uri": "n1-standard-2"}, + }, } @pytest.fixture(autouse=True) def setup_teardown(): - 
cluster_client = dataproc.ClusterControllerClient(client_options={ - 'api_endpoint': '{}-dataproc.googleapis.com:443'.format(REGION) - }) + cluster_client = dataproc.ClusterControllerClient( + client_options={"api_endpoint": "{}-dataproc.googleapis.com:443".format(REGION)} + ) # Create the cluster. operation = cluster_client.create_cluster( @@ -54,13 +48,17 @@ def setup_teardown(): yield - cluster_client.delete_cluster(request={ - "project_id": PROJECT_ID, "region": REGION, "cluster_name": CLUSTER_NAME - }) + cluster_client.delete_cluster( + request={ + "project_id": PROJECT_ID, + "region": REGION, + "cluster_name": CLUSTER_NAME, + } + ) def test_submit_job(capsys): submit_job.submit_job(PROJECT_ID, REGION, CLUSTER_NAME) out, _ = capsys.readouterr() - assert 'Job finished successfully' in out + assert "Job finished successfully" in out diff --git a/samples/snippets/submit_job_to_cluster.py b/samples/snippets/submit_job_to_cluster.py index d613cf5b..5fa3ccd5 100644 --- a/samples/snippets/submit_job_to_cluster.py +++ b/samples/snippets/submit_job_to_cluster.py @@ -77,8 +77,10 @@ def download_output(project, cluster_id, output_bucket, job_id): print("Downloading output file.") client = storage.Client(project=project) bucket = client.get_bucket(output_bucket) - output_blob = "google-cloud-dataproc-metainfo/{}/jobs/{}/driveroutput.000000000".format( - cluster_id, job_id + output_blob = ( + "google-cloud-dataproc-metainfo/{}/jobs/{}/driveroutput.000000000".format( + cluster_id, job_id + ) ) return bucket.blob(output_blob).download_as_string() @@ -230,8 +232,10 @@ def main( region = get_region_from_zone(zone) # Use a regional gRPC endpoint. See: # https://cloud.google.com/dataproc/docs/concepts/regional-endpoints - client_transport = cluster_controller_grpc_transport.ClusterControllerGrpcTransport( - address="{}-dataproc.googleapis.com:443".format(region) + client_transport = ( + cluster_controller_grpc_transport.ClusterControllerGrpcTransport( + address="{}-dataproc.googleapis.com:443".format(region) + ) ) job_transport = job_controller_grpc_transport.JobControllerGrpcTransport( address="{}-dataproc.googleapis.com:443".format(region) diff --git a/samples/snippets/update_cluster.py b/samples/snippets/update_cluster.py new file mode 100644 index 00000000..b72c4307 --- /dev/null +++ b/samples/snippets/update_cluster.py @@ -0,0 +1,55 @@ +#!/usr/bin/env python +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +import sys + +# [START dataproc_update_cluster] +from google.cloud import dataproc_v1 as dataproc + + +def update_cluster(project_id, region, cluster_name): + """Specify client with desired cluster""" + client = dataproc.ClusterControllerClient( + client_options={"api_endpoint": f"{region}-dataproc.googleapis.com:443"} + ) + # Get cluster + cluster = client.get_cluster( + project_id=project_id, region=region, cluster_name=cluster_name + ) + # Update number of clusters + + new_num_instances = cluster.config.worker_config.num_instances * 2 + mask = {"paths": {"config.worker_config.num_instances": str(new_num_instances)}} + # Update cluster config + cluster.config.worker_config.num_instances = new_num_instances + # Update cluster + operation = client.update_cluster( + project_id=project_id, + region=region, + cluster=cluster, + cluster_name=cluster_name, + update_mask=mask, + ) + # Return result of operation + updated_cluster = operation.result() + print(f"result was: {updated_cluster}") + + +if __name__ == "__main__": + if len(sys.argv) < 4: + sys.exit("python update_cluster.py project_id region cluster_name") + + project_id = sys.argv[1] + region = sys.argv[2] + cluster_name = sys.argv[3] + update_cluster(project_id, region, cluster_name) diff --git a/samples/snippets/update_cluster_test.py b/samples/snippets/update_cluster_test.py new file mode 100644 index 00000000..e69de29b From 205a4d0ba0666cc5c1feb7c90e4ed397740f540e Mon Sep 17 00:00:00 2001 From: Lo Ferris Date: Thu, 1 Jul 2021 17:07:24 -0700 Subject: [PATCH 02/15] consistent formatting with create_cluster.py --- samples/snippets/update_cluster.py | 22 +++++++++++++++++----- 1 file changed, 17 insertions(+), 5 deletions(-) diff --git a/samples/snippets/update_cluster.py b/samples/snippets/update_cluster.py index b72c4307..f31e3d58 100644 --- a/samples/snippets/update_cluster.py +++ b/samples/snippets/update_cluster.py @@ -11,6 +11,12 @@ # See the License for the specific language governing permissions and # limitations under the License. +# This sample walks a user through updating a Cloud Dataproc cluster using +# the Python client library. +# +# This script can be run on its own: +# python update_cluster.py ${PROJECT_ID} ${REGION} ${CLUSTER_NAME} + import sys # [START dataproc_update_cluster] @@ -18,16 +24,22 @@ def update_cluster(project_id, region, cluster_name): - """Specify client with desired cluster""" + """This sample walks a user through updating a Cloud Dataproc cluster + using the Python client library. + Args: + project_id (string): Project to use for creating resources. + region (string): Region where the resources should live. + cluster_name (string): Name to use for creating a cluster. + """ + # Create a client with the endpoint set to the desired cluster region. client = dataproc.ClusterControllerClient( client_options={"api_endpoint": f"{region}-dataproc.googleapis.com:443"} ) - # Get cluster + # Get cluster you wish to update. cluster = client.get_cluster( project_id=project_id, region=region, cluster_name=cluster_name ) # Update number of clusters - new_num_instances = cluster.config.worker_config.num_instances * 2 mask = {"paths": {"config.worker_config.num_instances": str(new_num_instances)}} # Update cluster config @@ -40,9 +52,9 @@ def update_cluster(project_id, region, cluster_name): cluster_name=cluster_name, update_mask=mask, ) - # Return result of operation + # Output a success message. 
updated_cluster = operation.result() - print(f"result was: {updated_cluster}") + print(f"Cluster was updated successfully: {updated_cluster.cluster_name}") if __name__ == "__main__": From 7b62aa77a2adee7cfb979fc0d512b7c75079ed9e Mon Sep 17 00:00:00 2001 From: Lo Ferris Date: Thu, 1 Jul 2021 17:33:08 -0700 Subject: [PATCH 03/15] test first draft --- samples/snippets/update_cluster_test.py | 56 +++++++++++++++++++++++++ 1 file changed, 56 insertions(+) diff --git a/samples/snippets/update_cluster_test.py b/samples/snippets/update_cluster_test.py index e69de29b..ac86a23b 100644 --- a/samples/snippets/update_cluster_test.py +++ b/samples/snippets/update_cluster_test.py @@ -0,0 +1,56 @@ +# Copyright 2019 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from google.cloud.dataproc_v1.types.clusters import GceClusterConfig +import os +import uuid + +from google.cloud import dataproc_v1 as dataproc +import pytest + +import create_cluster +import update_cluster + + +PROJECT_ID = os.environ["GOOGLE_CLOUD_PROJECT"] +REGION = "us-central1" +CLUSTER_NAME = "py-cc-test-{}".format(str(uuid.uuid4())) + + +@pytest.fixture(autouse=True) +def teardown(): + yield + + cluster_client = dataproc.ClusterControllerClient( + client_options={"api_endpoint": f"{REGION}-dataproc.googleapis.com:443"} + ) + # Client library function + operation = cluster_client.delete_cluster( + request={ + "project_id": PROJECT_ID, + "region": REGION, + "cluster_name": CLUSTER_NAME, + } + ) + # Wait for cluster to delete + operation.result() + + +def test_update_cluster(capsys): + # Wrapper function for client library function + create_cluster.create_cluster(PROJECT_ID, REGION, CLUSTER_NAME) + update_cluster.update_cluster(PROJECT_ID, REGION, CLUSTER_NAME) + + out, _ = capsys.readouterr() + assert CLUSTER_NAME in out From dac96637dc06da9e4c9f2a005c6b8e2883eaf9ed Mon Sep 17 00:00:00 2001 From: Lo Ferris Date: Fri, 2 Jul 2021 11:33:13 -0700 Subject: [PATCH 04/15] update_cluster sample complete --- samples/snippets/update_cluster.py | 6 +++--- samples/snippets/update_cluster_test.py | 4 ++-- 2 files changed, 5 insertions(+), 5 deletions(-) diff --git a/samples/snippets/update_cluster.py b/samples/snippets/update_cluster.py index f31e3d58..4cd9bd77 100644 --- a/samples/snippets/update_cluster.py +++ b/samples/snippets/update_cluster.py @@ -23,7 +23,7 @@ from google.cloud import dataproc_v1 as dataproc -def update_cluster(project_id, region, cluster_name): +def update_cluster(project_id, region, cluster_name, new_num_instances): """This sample walks a user through updating a Cloud Dataproc cluster using the Python client library. 
     Args:
         project_id (string): Project to use for creating resources.
         region (string): Region where the resources should live.
         cluster_name (string): Name to use for creating a cluster.
     """
@@ -40,7 +40,6 @@ def update_cluster(project_id, region, cluster_name):
         project_id=project_id, region=region, cluster_name=cluster_name
     )
     # Update number of clusters
-    new_num_instances = cluster.config.worker_config.num_instances * 2
     mask = {"paths": {"config.worker_config.num_instances": str(new_num_instances)}}
     # Update cluster config
     cluster.config.worker_config.num_instances = new_num_instances
@@ -58,10 +57,11 @@ def update_cluster(project_id, region, cluster_name):
 
 
 if __name__ == "__main__":
-    if len(sys.argv) < 4:
-        sys.exit("python update_cluster.py project_id region cluster_name")
+    if len(sys.argv) < 5:
+        sys.exit("python update_cluster.py project_id region cluster_name new_num_instances")
 
     project_id = sys.argv[1]
     region = sys.argv[2]
     cluster_name = sys.argv[3]
-    update_cluster(project_id, region, cluster_name)
+    new_num_instances = int(sys.argv[4])
+    update_cluster(project_id, region, cluster_name, new_num_instances)
diff --git a/samples/snippets/update_cluster_test.py b/samples/snippets/update_cluster_test.py
index ac86a23b..8e5c3090 100644
--- a/samples/snippets/update_cluster_test.py
+++ b/samples/snippets/update_cluster_test.py
@@ -12,7 +12,6 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
-from google.cloud.dataproc_v1.types.clusters import GceClusterConfig
 import os
 import uuid
 
@@ -26,6 +25,7 @@
 PROJECT_ID = os.environ["GOOGLE_CLOUD_PROJECT"]
 REGION = "us-central1"
 CLUSTER_NAME = "py-cc-test-{}".format(str(uuid.uuid4()))
+NEW_NUM_INSTANCES = 5
 
 
 @pytest.fixture(autouse=True)
@@ -50,7 +50,7 @@ def teardown():
 def test_update_cluster(capsys):
     # Wrapper function for client library function
     create_cluster.create_cluster(PROJECT_ID, REGION, CLUSTER_NAME)
-    update_cluster.update_cluster(PROJECT_ID, REGION, CLUSTER_NAME)
+    update_cluster.update_cluster(PROJECT_ID, REGION, CLUSTER_NAME, NEW_NUM_INSTANCES)
 
     out, _ = capsys.readouterr()
     assert CLUSTER_NAME in out

From 632fd09f4bbcbe7eee97782d48080d32bc25e1e1 Mon Sep 17 00:00:00 2001
From: Owl Bot
Date: Fri, 2 Jul 2021 18:40:00 +0000
Subject: [PATCH 05/15] =?UTF-8?q?=F0=9F=A6=89=20Updates=20from=20OwlBot?=
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

See https://github.com/googleapis/repo-automation-bots/blob/master/packages/owl-bot/README.md
---
 samples/snippets/noxfile.py | 41 +++++++++++++++++--------------------
 1 file changed, 19 insertions(+), 22 deletions(-)

diff --git a/samples/snippets/noxfile.py b/samples/snippets/noxfile.py
index b3c8658a..5ff9e1db 100644
--- a/samples/snippets/noxfile.py
+++ b/samples/snippets/noxfile.py
@@ -38,15 +38,17 @@
 
 TEST_CONFIG = {
     # You can opt out from the test for specific Python versions.
-    "ignored_versions": ["2.7"],
+    'ignored_versions': ["2.7"],
+
     # Old samples are opted out of enforcing Python type hints
     # All new samples should feature them
-    "enforce_type_hints": False,
+    'enforce_type_hints': False,
+
     # An envvar key for determining the project id to use. Change it
     # to 'BUILD_SPECIFIC_GCLOUD_PROJECT' if you want to opt in using a
     # build specific Cloud project. You can also use your own string
     # to use your own Cloud project.
-    "gcloud_project_env": "GOOGLE_CLOUD_PROJECT",
+    'gcloud_project_env': 'GOOGLE_CLOUD_PROJECT',
     # 'gcloud_project_env': 'BUILD_SPECIFIC_GCLOUD_PROJECT',
     # If you need to use a specific version of pip,
     # change pip_version_override to the string representation
@@ -54,13 +56,13 @@
     "pip_version_override": None,
     # A dictionary you want to inject into your test. Don't put any
     # secrets here. These values will override predefined values.
- "envs": {}, + 'envs': {}, } try: # Ensure we can import noxfile_config in the project's directory. - sys.path.append(".") + sys.path.append('.') from noxfile_config import TEST_CONFIG_OVERRIDE except ImportError as e: print("No user noxfile_config found: detail: {}".format(e)) @@ -75,12 +77,12 @@ def get_pytest_env_vars() -> Dict[str, str]: ret = {} # Override the GCLOUD_PROJECT and the alias. - env_key = TEST_CONFIG["gcloud_project_env"] + env_key = TEST_CONFIG['gcloud_project_env'] # This should error out if not set. - ret["GOOGLE_CLOUD_PROJECT"] = os.environ[env_key] + ret['GOOGLE_CLOUD_PROJECT'] = os.environ[env_key] # Apply user supplied envs. - ret.update(TEST_CONFIG["envs"]) + ret.update(TEST_CONFIG['envs']) return ret @@ -89,7 +91,7 @@ def get_pytest_env_vars() -> Dict[str, str]: ALL_VERSIONS = ["2.7", "3.6", "3.7", "3.8", "3.9"] # Any default versions that should be ignored. -IGNORED_VERSIONS = TEST_CONFIG["ignored_versions"] +IGNORED_VERSIONS = TEST_CONFIG['ignored_versions'] TESTED_VERSIONS = sorted([v for v in ALL_VERSIONS if v not in IGNORED_VERSIONS]) @@ -138,7 +140,7 @@ def _determine_local_import_names(start_dir: str) -> List[str]: @nox.session def lint(session: nox.sessions.Session) -> None: - if not TEST_CONFIG["enforce_type_hints"]: + if not TEST_CONFIG['enforce_type_hints']: session.install("flake8", "flake8-import-order") else: session.install("flake8", "flake8-import-order", "flake8-annotations") @@ -147,11 +149,9 @@ def lint(session: nox.sessions.Session) -> None: args = FLAKE8_COMMON_ARGS + [ "--application-import-names", ",".join(local_names), - ".", + "." ] session.run("flake8", *args) - - # # Black # @@ -164,7 +164,6 @@ def blacken(session: nox.sessions.Session) -> None: session.run("black", *python_files) - # # Sample Tests # @@ -173,9 +172,7 @@ def blacken(session: nox.sessions.Session) -> None: PYTEST_COMMON_ARGS = ["--junitxml=sponge_log.xml"] -def _session_tests( - session: nox.sessions.Session, post_install: Callable = None -) -> None: +def _session_tests(session: nox.sessions.Session, post_install: Callable = None) -> None: if TEST_CONFIG["pip_version_override"]: pip_version = TEST_CONFIG["pip_version_override"] session.install(f"pip=={pip_version}") @@ -205,7 +202,7 @@ def _session_tests( # on travis where slow and flaky tests are excluded. # See http://doc.pytest.org/en/latest/_modules/_pytest/main.html success_codes=[0, 5], - env=get_pytest_env_vars(), + env=get_pytest_env_vars() ) @@ -215,9 +212,9 @@ def py(session: nox.sessions.Session) -> None: if session.python in TESTED_VERSIONS: _session_tests(session) else: - session.skip( - "SKIPPED: {} tests are disabled for this sample.".format(session.python) - ) + session.skip("SKIPPED: {} tests are disabled for this sample.".format( + session.python + )) # @@ -226,7 +223,7 @@ def py(session: nox.sessions.Session) -> None: def _get_repo_root() -> Optional[str]: - """Returns the root folder of the project.""" + """ Returns the root folder of the project. """ # Get root of this repository. Assume we don't have directories nested deeper than 10 items. 
p = Path(os.getcwd()) for i in range(10): From 5be2ed96d8bd9f763773b737247cbf81ae18273b Mon Sep 17 00:00:00 2001 From: Lo Ferris Date: Fri, 23 Jul 2021 15:43:15 -0700 Subject: [PATCH 06/15] docs: add update cluster sample - fixing formatting --- samples/snippets/update_cluster.py | 23 ++++++++++++++++------- samples/snippets/update_cluster_test.py | 9 ++++++++- 2 files changed, 24 insertions(+), 8 deletions(-) diff --git a/samples/snippets/update_cluster.py b/samples/snippets/update_cluster.py index 4cd9bd77..d477c4a0 100644 --- a/samples/snippets/update_cluster.py +++ b/samples/snippets/update_cluster.py @@ -1,9 +1,10 @@ -#!/usr/bin/env python +# Copyright 2021 Google LLC +# # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # -# http://www.apache.org/licenses/LICENSE-2.0 +# http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, @@ -11,11 +12,11 @@ # See the License for the specific language governing permissions and # limitations under the License. -# This sample walks a user through updating a Cloud Dataproc cluster using -# the Python client library. -# -# This script can be run on its own: -# python update_cluster.py ${PROJECT_ID} ${REGION} ${CLUSTER_NAME} +# This sample walks a user through updating the number of clusters using the Dataproc +# client library. + +# Usage: +# python update_cluster.py --project_id --region --cluster_name import sys @@ -31,18 +32,23 @@ def update_cluster(project_id, region, cluster_name, new_num_instances): region (string): Region where the resources should live. cluster_name (string): Name to use for creating a cluster. """ + # Create a client with the endpoint set to the desired cluster region. client = dataproc.ClusterControllerClient( client_options={"api_endpoint": f"{region}-dataproc.googleapis.com:443"} ) + # Get cluster you wish to update. cluster = client.get_cluster( project_id=project_id, region=region, cluster_name=cluster_name ) + # Update number of clusters mask = {"paths": {"config.worker_config.num_instances": str(new_num_instances)}} + # Update cluster config cluster.config.worker_config.num_instances = new_num_instances + # Update cluster operation = client.update_cluster( project_id=project_id, @@ -51,9 +57,12 @@ def update_cluster(project_id, region, cluster_name, new_num_instances): cluster_name=cluster_name, update_mask=mask, ) + # Output a success message. updated_cluster = operation.result() print(f"Cluster was updated successfully: {updated_cluster.cluster_name}") +# [END dataproc_update_cluster] + if __name__ == "__main__": diff --git a/samples/snippets/update_cluster_test.py b/samples/snippets/update_cluster_test.py index 8e5c3090..e57e868a 100644 --- a/samples/snippets/update_cluster_test.py +++ b/samples/snippets/update_cluster_test.py @@ -1,4 +1,4 @@ -# Copyright 2019 Google LLC +# Copyright 2021 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. @@ -12,6 +12,10 @@ # See the License for the specific language governing permissions and # limitations under the License. +# This sample walks a user through updating the number of clusters using the Dataproc +# client library. 
+ + import os import uuid @@ -35,6 +39,7 @@ def teardown(): cluster_client = dataproc.ClusterControllerClient( client_options={"api_endpoint": f"{REGION}-dataproc.googleapis.com:443"} ) + # Client library function operation = cluster_client.delete_cluster( request={ @@ -43,11 +48,13 @@ def teardown(): "cluster_name": CLUSTER_NAME, } ) + # Wait for cluster to delete operation.result() def test_update_cluster(capsys): + # Wrapper function for client library function create_cluster.create_cluster(PROJECT_ID, REGION, CLUSTER_NAME) update_cluster.update_cluster(PROJECT_ID, REGION, CLUSTER_NAME, NEW_NUM_INSTANCES) From cd25bf8903999eff5cc6aef3d5103e19acbfafbb Mon Sep 17 00:00:00 2001 From: Lo Ferris <50979514+loferris@users.noreply.github.com> Date: Fri, 23 Jul 2021 15:44:42 -0700 Subject: [PATCH 07/15] Update samples/snippets/update_cluster.py Co-authored-by: Bu Sun Kim <8822365+busunkim96@users.noreply.github.com> --- samples/snippets/update_cluster.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/samples/snippets/update_cluster.py b/samples/snippets/update_cluster.py index d477c4a0..4ecb35f3 100644 --- a/samples/snippets/update_cluster.py +++ b/samples/snippets/update_cluster.py @@ -28,9 +28,9 @@ def update_cluster(project_id, region, cluster_name, new_num_instances): """This sample walks a user through updating a Cloud Dataproc cluster using the Python client library. Args: - project_id (string): Project to use for creating resources. - region (string): Region where the resources should live. - cluster_name (string): Name to use for creating a cluster. + project_id (str): Project to use for creating resources. + region (str): Region where the resources should live. + cluster_name (str): Name to use for creating a cluster. """ # Create a client with the endpoint set to the desired cluster region. From 9a80e3dd3b03b8417c746267bd9f170e254f0558 Mon Sep 17 00:00:00 2001 From: Lo Ferris <50979514+loferris@users.noreply.github.com> Date: Fri, 23 Jul 2021 15:45:25 -0700 Subject: [PATCH 08/15] Update samples/snippets/update_cluster.py Co-authored-by: Bu Sun Kim <8822365+busunkim96@users.noreply.github.com> --- samples/snippets/update_cluster.py | 1 + 1 file changed, 1 insertion(+) diff --git a/samples/snippets/update_cluster.py b/samples/snippets/update_cluster.py index 4ecb35f3..380303c2 100644 --- a/samples/snippets/update_cluster.py +++ b/samples/snippets/update_cluster.py @@ -27,6 +27,7 @@ def update_cluster(project_id, region, cluster_name, new_num_instances): """This sample walks a user through updating a Cloud Dataproc cluster using the Python client library. + Args: project_id (str): Project to use for creating resources. region (str): Region where the resources should live. From b48cbb35d1f7fbd9e6f4fab4ba18fa1223f3bbfa Mon Sep 17 00:00:00 2001 From: Lo Ferris Date: Fri, 23 Jul 2021 16:24:11 -0700 Subject: [PATCH 09/15] updated test, still fine-tuning --- samples/snippets/update_cluster_test.py | 52 ++++++++++++++++--------- 1 file changed, 34 insertions(+), 18 deletions(-) diff --git a/samples/snippets/update_cluster_test.py b/samples/snippets/update_cluster_test.py index e57e868a..69a17eda 100644 --- a/samples/snippets/update_cluster_test.py +++ b/samples/snippets/update_cluster_test.py @@ -16,48 +16,64 @@ # client library. 
+from google.cloud.dataproc_v1.services.cluster_controller.client import ClusterControllerClient +from google.cloud.dataproc_v1.services.cluster_controller import async_client +from google.cloud.dataproc_v1.services import cluster_controller +from google.cloud.dataproc_v1.types.clusters import GetClusterRequest import os import uuid from google.cloud import dataproc_v1 as dataproc import pytest -import create_cluster import update_cluster PROJECT_ID = os.environ["GOOGLE_CLOUD_PROJECT"] REGION = "us-central1" -CLUSTER_NAME = "py-cc-test-{}".format(str(uuid.uuid4())) +CLUSTER_NAME = f"py-cc-test-{str(uuid.uuid4())}" NEW_NUM_INSTANCES = 5 +CLUSTER = { + 'project_id': PROJECT_ID, + 'cluster_name': CLUSTER_NAME, + 'config': { + 'master_config': { + 'num_instances': 1, + 'machine_type_uri': 'n1-standard-2' + }, + 'worker_config': { + 'num_instances': 2, + 'machine_type_uri': 'n1-standard-2' + } + } +} @pytest.fixture(autouse=True) -def teardown(): - yield +def setup_teardown(): + cluster_client = dataproc.ClusterControllerClient(client_options={ + 'api_endpoint': '{}-dataproc.googleapis.com:443'.format(REGION) + }) - cluster_client = dataproc.ClusterControllerClient( - client_options={"api_endpoint": f"{REGION}-dataproc.googleapis.com:443"} - ) - - # Client library function - operation = cluster_client.delete_cluster( - request={ - "project_id": PROJECT_ID, - "region": REGION, - "cluster_name": CLUSTER_NAME, - } + # Create the cluster. + operation = cluster_client.create_cluster( + request={"project_id": PROJECT_ID, "region": REGION, "cluster": CLUSTER} ) - - # Wait for cluster to delete operation.result() + yield + + cluster_client.delete_cluster(request={ + "project_id": PROJECT_ID, "region": REGION, "cluster_name": CLUSTER_NAME + }) + def test_update_cluster(capsys): # Wrapper function for client library function - create_cluster.create_cluster(PROJECT_ID, REGION, CLUSTER_NAME) update_cluster.update_cluster(PROJECT_ID, REGION, CLUSTER_NAME, NEW_NUM_INSTANCES) + new_num_cluster = dataproc.ClusterControllerClient.get_cluster(PROJECT_ID) out, _ = capsys.readouterr() assert CLUSTER_NAME in out + assert new_num_cluster.config.worker_config.num_instances == 5 From 853cf21786578a8f5fad71671052edce62fed810 Mon Sep 17 00:00:00 2001 From: Lo Ferris Date: Wed, 28 Jul 2021 17:27:15 -0700 Subject: [PATCH 10/15] added get_cluster to test --- samples/snippets/noxfile.py | 41 ++++++++++++----------- samples/snippets/update_cluster.py | 15 +++++---- samples/snippets/update_cluster_test.py | 44 +++++++++++++------------ 3 files changed, 53 insertions(+), 47 deletions(-) diff --git a/samples/snippets/noxfile.py b/samples/snippets/noxfile.py index 5ff9e1db..b3c8658a 100644 --- a/samples/snippets/noxfile.py +++ b/samples/snippets/noxfile.py @@ -38,17 +38,15 @@ TEST_CONFIG = { # You can opt out from the test for specific Python versions. - 'ignored_versions': ["2.7"], - + "ignored_versions": ["2.7"], # Old samples are opted out of enforcing Python type hints # All new samples should feature them - 'enforce_type_hints': False, - + "enforce_type_hints": False, # An envvar key for determining the project id to use. Change it # to 'BUILD_SPECIFIC_GCLOUD_PROJECT' if you want to opt in using a # build specific Cloud project. You can also use your own string # to use your own Cloud project. 
- 'gcloud_project_env': 'GOOGLE_CLOUD_PROJECT', + "gcloud_project_env": "GOOGLE_CLOUD_PROJECT", # 'gcloud_project_env': 'BUILD_SPECIFIC_GCLOUD_PROJECT', # If you need to use a specific version of pip, # change pip_version_override to the string representation @@ -56,13 +54,13 @@ "pip_version_override": None, # A dictionary you want to inject into your test. Don't put any # secrets here. These values will override predefined values. - 'envs': {}, + "envs": {}, } try: # Ensure we can import noxfile_config in the project's directory. - sys.path.append('.') + sys.path.append(".") from noxfile_config import TEST_CONFIG_OVERRIDE except ImportError as e: print("No user noxfile_config found: detail: {}".format(e)) @@ -77,12 +75,12 @@ def get_pytest_env_vars() -> Dict[str, str]: ret = {} # Override the GCLOUD_PROJECT and the alias. - env_key = TEST_CONFIG['gcloud_project_env'] + env_key = TEST_CONFIG["gcloud_project_env"] # This should error out if not set. - ret['GOOGLE_CLOUD_PROJECT'] = os.environ[env_key] + ret["GOOGLE_CLOUD_PROJECT"] = os.environ[env_key] # Apply user supplied envs. - ret.update(TEST_CONFIG['envs']) + ret.update(TEST_CONFIG["envs"]) return ret @@ -91,7 +89,7 @@ def get_pytest_env_vars() -> Dict[str, str]: ALL_VERSIONS = ["2.7", "3.6", "3.7", "3.8", "3.9"] # Any default versions that should be ignored. -IGNORED_VERSIONS = TEST_CONFIG['ignored_versions'] +IGNORED_VERSIONS = TEST_CONFIG["ignored_versions"] TESTED_VERSIONS = sorted([v for v in ALL_VERSIONS if v not in IGNORED_VERSIONS]) @@ -140,7 +138,7 @@ def _determine_local_import_names(start_dir: str) -> List[str]: @nox.session def lint(session: nox.sessions.Session) -> None: - if not TEST_CONFIG['enforce_type_hints']: + if not TEST_CONFIG["enforce_type_hints"]: session.install("flake8", "flake8-import-order") else: session.install("flake8", "flake8-import-order", "flake8-annotations") @@ -149,9 +147,11 @@ def lint(session: nox.sessions.Session) -> None: args = FLAKE8_COMMON_ARGS + [ "--application-import-names", ",".join(local_names), - "." + ".", ] session.run("flake8", *args) + + # # Black # @@ -164,6 +164,7 @@ def blacken(session: nox.sessions.Session) -> None: session.run("black", *python_files) + # # Sample Tests # @@ -172,7 +173,9 @@ def blacken(session: nox.sessions.Session) -> None: PYTEST_COMMON_ARGS = ["--junitxml=sponge_log.xml"] -def _session_tests(session: nox.sessions.Session, post_install: Callable = None) -> None: +def _session_tests( + session: nox.sessions.Session, post_install: Callable = None +) -> None: if TEST_CONFIG["pip_version_override"]: pip_version = TEST_CONFIG["pip_version_override"] session.install(f"pip=={pip_version}") @@ -202,7 +205,7 @@ def _session_tests(session: nox.sessions.Session, post_install: Callable = None) # on travis where slow and flaky tests are excluded. # See http://doc.pytest.org/en/latest/_modules/_pytest/main.html success_codes=[0, 5], - env=get_pytest_env_vars() + env=get_pytest_env_vars(), ) @@ -212,9 +215,9 @@ def py(session: nox.sessions.Session) -> None: if session.python in TESTED_VERSIONS: _session_tests(session) else: - session.skip("SKIPPED: {} tests are disabled for this sample.".format( - session.python - )) + session.skip( + "SKIPPED: {} tests are disabled for this sample.".format(session.python) + ) # @@ -223,7 +226,7 @@ def py(session: nox.sessions.Session) -> None: def _get_repo_root() -> Optional[str]: - """ Returns the root folder of the project. """ + """Returns the root folder of the project.""" # Get root of this repository. 
Assume we don't have directories nested deeper than 10 items. p = Path(os.getcwd()) for i in range(10): diff --git a/samples/snippets/update_cluster.py b/samples/snippets/update_cluster.py index 380303c2..f4520224 100644 --- a/samples/snippets/update_cluster.py +++ b/samples/snippets/update_cluster.py @@ -33,23 +33,23 @@ def update_cluster(project_id, region, cluster_name, new_num_instances): region (str): Region where the resources should live. cluster_name (str): Name to use for creating a cluster. """ - + # Create a client with the endpoint set to the desired cluster region. client = dataproc.ClusterControllerClient( client_options={"api_endpoint": f"{region}-dataproc.googleapis.com:443"} ) - + # Get cluster you wish to update. cluster = client.get_cluster( project_id=project_id, region=region, cluster_name=cluster_name ) - + # Update number of clusters mask = {"paths": {"config.worker_config.num_instances": str(new_num_instances)}} - + # Update cluster config cluster.config.worker_config.num_instances = new_num_instances - + # Update cluster operation = client.update_cluster( project_id=project_id, @@ -58,13 +58,14 @@ def update_cluster(project_id, region, cluster_name, new_num_instances): cluster_name=cluster_name, update_mask=mask, ) - + # Output a success message. updated_cluster = operation.result() print(f"Cluster was updated successfully: {updated_cluster.cluster_name}") -# [END dataproc_update_cluster] +# [END dataproc_update_cluster] + if __name__ == "__main__": if len(sys.argv) < 5: diff --git a/samples/snippets/update_cluster_test.py b/samples/snippets/update_cluster_test.py index 69a17eda..87d6a2bb 100644 --- a/samples/snippets/update_cluster_test.py +++ b/samples/snippets/update_cluster_test.py @@ -16,7 +16,9 @@ # client library. -from google.cloud.dataproc_v1.services.cluster_controller.client import ClusterControllerClient +from google.cloud.dataproc_v1.services.cluster_controller.client import ( + ClusterControllerClient, +) from google.cloud.dataproc_v1.services.cluster_controller import async_client from google.cloud.dataproc_v1.services import cluster_controller from google.cloud.dataproc_v1.types.clusters import GetClusterRequest @@ -34,26 +36,20 @@ CLUSTER_NAME = f"py-cc-test-{str(uuid.uuid4())}" NEW_NUM_INSTANCES = 5 CLUSTER = { - 'project_id': PROJECT_ID, - 'cluster_name': CLUSTER_NAME, - 'config': { - 'master_config': { - 'num_instances': 1, - 'machine_type_uri': 'n1-standard-2' - }, - 'worker_config': { - 'num_instances': 2, - 'machine_type_uri': 'n1-standard-2' - } - } + "project_id": PROJECT_ID, + "cluster_name": CLUSTER_NAME, + "config": { + "master_config": {"num_instances": 1, "machine_type_uri": "n1-standard-2"}, + "worker_config": {"num_instances": 2, "machine_type_uri": "n1-standard-2"}, + }, } @pytest.fixture(autouse=True) def setup_teardown(): - cluster_client = dataproc.ClusterControllerClient(client_options={ - 'api_endpoint': '{}-dataproc.googleapis.com:443'.format(REGION) - }) + cluster_client = dataproc.ClusterControllerClient( + client_options={"api_endpoint": "{}-dataproc.googleapis.com:443".format(REGION)} + ) # Create the cluster. 
operation = cluster_client.create_cluster( @@ -63,16 +59,22 @@ def setup_teardown(): yield - cluster_client.delete_cluster(request={ - "project_id": PROJECT_ID, "region": REGION, "cluster_name": CLUSTER_NAME - }) + cluster_client.delete_cluster( + request={ + "project_id": PROJECT_ID, + "region": REGION, + "cluster_name": CLUSTER_NAME, + } + ) def test_update_cluster(capsys): - + # Wrapper function for client library function update_cluster.update_cluster(PROJECT_ID, REGION, CLUSTER_NAME, NEW_NUM_INSTANCES) - new_num_cluster = dataproc.ClusterControllerClient.get_cluster(PROJECT_ID) + new_num_cluster = dataproc.ClusterControllerClient.get_cluster( + project_id=PROJECT_ID, region=REGION, cluster_name=CLUSTER_NAME + ) out, _ = capsys.readouterr() assert CLUSTER_NAME in out From 0bba122635020b62381dbf01c5d220cee62c9f70 Mon Sep 17 00:00:00 2001 From: Owl Bot Date: Thu, 29 Jul 2021 00:28:44 +0000 Subject: [PATCH 11/15] =?UTF-8?q?=F0=9F=A6=89=20Updates=20from=20OwlBot?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit See https://github.com/googleapis/repo-automation-bots/blob/master/packages/owl-bot/README.md --- samples/snippets/noxfile.py | 41 +++++++++++++++++-------------------- 1 file changed, 19 insertions(+), 22 deletions(-) diff --git a/samples/snippets/noxfile.py b/samples/snippets/noxfile.py index b3c8658a..5ff9e1db 100644 --- a/samples/snippets/noxfile.py +++ b/samples/snippets/noxfile.py @@ -38,15 +38,17 @@ TEST_CONFIG = { # You can opt out from the test for specific Python versions. - "ignored_versions": ["2.7"], + 'ignored_versions': ["2.7"], + # Old samples are opted out of enforcing Python type hints # All new samples should feature them - "enforce_type_hints": False, + 'enforce_type_hints': False, + # An envvar key for determining the project id to use. Change it # to 'BUILD_SPECIFIC_GCLOUD_PROJECT' if you want to opt in using a # build specific Cloud project. You can also use your own string # to use your own Cloud project. - "gcloud_project_env": "GOOGLE_CLOUD_PROJECT", + 'gcloud_project_env': 'GOOGLE_CLOUD_PROJECT', # 'gcloud_project_env': 'BUILD_SPECIFIC_GCLOUD_PROJECT', # If you need to use a specific version of pip, # change pip_version_override to the string representation @@ -54,13 +56,13 @@ "pip_version_override": None, # A dictionary you want to inject into your test. Don't put any # secrets here. These values will override predefined values. - "envs": {}, + 'envs': {}, } try: # Ensure we can import noxfile_config in the project's directory. - sys.path.append(".") + sys.path.append('.') from noxfile_config import TEST_CONFIG_OVERRIDE except ImportError as e: print("No user noxfile_config found: detail: {}".format(e)) @@ -75,12 +77,12 @@ def get_pytest_env_vars() -> Dict[str, str]: ret = {} # Override the GCLOUD_PROJECT and the alias. - env_key = TEST_CONFIG["gcloud_project_env"] + env_key = TEST_CONFIG['gcloud_project_env'] # This should error out if not set. - ret["GOOGLE_CLOUD_PROJECT"] = os.environ[env_key] + ret['GOOGLE_CLOUD_PROJECT'] = os.environ[env_key] # Apply user supplied envs. - ret.update(TEST_CONFIG["envs"]) + ret.update(TEST_CONFIG['envs']) return ret @@ -89,7 +91,7 @@ def get_pytest_env_vars() -> Dict[str, str]: ALL_VERSIONS = ["2.7", "3.6", "3.7", "3.8", "3.9"] # Any default versions that should be ignored. 
-IGNORED_VERSIONS = TEST_CONFIG["ignored_versions"] +IGNORED_VERSIONS = TEST_CONFIG['ignored_versions'] TESTED_VERSIONS = sorted([v for v in ALL_VERSIONS if v not in IGNORED_VERSIONS]) @@ -138,7 +140,7 @@ def _determine_local_import_names(start_dir: str) -> List[str]: @nox.session def lint(session: nox.sessions.Session) -> None: - if not TEST_CONFIG["enforce_type_hints"]: + if not TEST_CONFIG['enforce_type_hints']: session.install("flake8", "flake8-import-order") else: session.install("flake8", "flake8-import-order", "flake8-annotations") @@ -147,11 +149,9 @@ def lint(session: nox.sessions.Session) -> None: args = FLAKE8_COMMON_ARGS + [ "--application-import-names", ",".join(local_names), - ".", + "." ] session.run("flake8", *args) - - # # Black # @@ -164,7 +164,6 @@ def blacken(session: nox.sessions.Session) -> None: session.run("black", *python_files) - # # Sample Tests # @@ -173,9 +172,7 @@ def blacken(session: nox.sessions.Session) -> None: PYTEST_COMMON_ARGS = ["--junitxml=sponge_log.xml"] -def _session_tests( - session: nox.sessions.Session, post_install: Callable = None -) -> None: +def _session_tests(session: nox.sessions.Session, post_install: Callable = None) -> None: if TEST_CONFIG["pip_version_override"]: pip_version = TEST_CONFIG["pip_version_override"] session.install(f"pip=={pip_version}") @@ -205,7 +202,7 @@ def _session_tests( # on travis where slow and flaky tests are excluded. # See http://doc.pytest.org/en/latest/_modules/_pytest/main.html success_codes=[0, 5], - env=get_pytest_env_vars(), + env=get_pytest_env_vars() ) @@ -215,9 +212,9 @@ def py(session: nox.sessions.Session) -> None: if session.python in TESTED_VERSIONS: _session_tests(session) else: - session.skip( - "SKIPPED: {} tests are disabled for this sample.".format(session.python) - ) + session.skip("SKIPPED: {} tests are disabled for this sample.".format( + session.python + )) # @@ -226,7 +223,7 @@ def py(session: nox.sessions.Session) -> None: def _get_repo_root() -> Optional[str]: - """Returns the root folder of the project.""" + """ Returns the root folder of the project. """ # Get root of this repository. Assume we don't have directories nested deeper than 10 items. 
p = Path(os.getcwd()) for i in range(10): From 9ac4e886eba209fbddc4af371bda2295a5c8d9f1 Mon Sep 17 00:00:00 2001 From: Lo Ferris Date: Wed, 28 Jul 2021 19:56:07 -0700 Subject: [PATCH 12/15] another attempt at writing test --- samples/snippets/update_cluster_test.py | 21 +++++++++++++++++++-- 1 file changed, 19 insertions(+), 2 deletions(-) diff --git a/samples/snippets/update_cluster_test.py b/samples/snippets/update_cluster_test.py index 87d6a2bb..a721d48f 100644 --- a/samples/snippets/update_cluster_test.py +++ b/samples/snippets/update_cluster_test.py @@ -44,6 +44,11 @@ }, } +# option 3 +"""cluster_client = dataproc.ClusterControllerClient( + client_options={"api_endpoint": "{}-dataproc.googleapis.com:443".format(REGION)} + )""" + @pytest.fixture(autouse=True) def setup_teardown(): @@ -68,11 +73,23 @@ def setup_teardown(): ) -def test_update_cluster(capsys): +# option 2 +"""@pytest.fixture(autouse=True) +def cluster_client(): + cluster_client = dataproc.ClusterControllerClient( + client_options={"api_endpoint": "{}-dataproc.googleapis.com:443".format(REGION)} + ) + return cluster_client""" + +def test_update_cluster(capsys): + # option 1 + cluster_client = dataproc.ClusterControllerClient( + client_options={"api_endpoint": "{}-dataproc.googleapis.com:443".format(REGION)} + ) # Wrapper function for client library function update_cluster.update_cluster(PROJECT_ID, REGION, CLUSTER_NAME, NEW_NUM_INSTANCES) - new_num_cluster = dataproc.ClusterControllerClient.get_cluster( + new_num_cluster = cluster_client.get_cluster( project_id=PROJECT_ID, region=REGION, cluster_name=CLUSTER_NAME ) From 47b7639c90592913934663bc690f86e0def68150 Mon Sep 17 00:00:00 2001 From: Lo Ferris Date: Thu, 29 Jul 2021 14:09:38 -0700 Subject: [PATCH 13/15] new test pattern --- samples/snippets/noxfile.py | 41 +++++++++++++------------ samples/snippets/update_cluster_test.py | 25 +++------------ 2 files changed, 26 insertions(+), 40 deletions(-) diff --git a/samples/snippets/noxfile.py b/samples/snippets/noxfile.py index 5ff9e1db..b3c8658a 100644 --- a/samples/snippets/noxfile.py +++ b/samples/snippets/noxfile.py @@ -38,17 +38,15 @@ TEST_CONFIG = { # You can opt out from the test for specific Python versions. - 'ignored_versions': ["2.7"], - + "ignored_versions": ["2.7"], # Old samples are opted out of enforcing Python type hints # All new samples should feature them - 'enforce_type_hints': False, - + "enforce_type_hints": False, # An envvar key for determining the project id to use. Change it # to 'BUILD_SPECIFIC_GCLOUD_PROJECT' if you want to opt in using a # build specific Cloud project. You can also use your own string # to use your own Cloud project. - 'gcloud_project_env': 'GOOGLE_CLOUD_PROJECT', + "gcloud_project_env": "GOOGLE_CLOUD_PROJECT", # 'gcloud_project_env': 'BUILD_SPECIFIC_GCLOUD_PROJECT', # If you need to use a specific version of pip, # change pip_version_override to the string representation @@ -56,13 +54,13 @@ "pip_version_override": None, # A dictionary you want to inject into your test. Don't put any # secrets here. These values will override predefined values. - 'envs': {}, + "envs": {}, } try: # Ensure we can import noxfile_config in the project's directory. - sys.path.append('.') + sys.path.append(".") from noxfile_config import TEST_CONFIG_OVERRIDE except ImportError as e: print("No user noxfile_config found: detail: {}".format(e)) @@ -77,12 +75,12 @@ def get_pytest_env_vars() -> Dict[str, str]: ret = {} # Override the GCLOUD_PROJECT and the alias. 
- env_key = TEST_CONFIG['gcloud_project_env'] + env_key = TEST_CONFIG["gcloud_project_env"] # This should error out if not set. - ret['GOOGLE_CLOUD_PROJECT'] = os.environ[env_key] + ret["GOOGLE_CLOUD_PROJECT"] = os.environ[env_key] # Apply user supplied envs. - ret.update(TEST_CONFIG['envs']) + ret.update(TEST_CONFIG["envs"]) return ret @@ -91,7 +89,7 @@ def get_pytest_env_vars() -> Dict[str, str]: ALL_VERSIONS = ["2.7", "3.6", "3.7", "3.8", "3.9"] # Any default versions that should be ignored. -IGNORED_VERSIONS = TEST_CONFIG['ignored_versions'] +IGNORED_VERSIONS = TEST_CONFIG["ignored_versions"] TESTED_VERSIONS = sorted([v for v in ALL_VERSIONS if v not in IGNORED_VERSIONS]) @@ -140,7 +138,7 @@ def _determine_local_import_names(start_dir: str) -> List[str]: @nox.session def lint(session: nox.sessions.Session) -> None: - if not TEST_CONFIG['enforce_type_hints']: + if not TEST_CONFIG["enforce_type_hints"]: session.install("flake8", "flake8-import-order") else: session.install("flake8", "flake8-import-order", "flake8-annotations") @@ -149,9 +147,11 @@ def lint(session: nox.sessions.Session) -> None: args = FLAKE8_COMMON_ARGS + [ "--application-import-names", ",".join(local_names), - "." + ".", ] session.run("flake8", *args) + + # # Black # @@ -164,6 +164,7 @@ def blacken(session: nox.sessions.Session) -> None: session.run("black", *python_files) + # # Sample Tests # @@ -172,7 +173,9 @@ def blacken(session: nox.sessions.Session) -> None: PYTEST_COMMON_ARGS = ["--junitxml=sponge_log.xml"] -def _session_tests(session: nox.sessions.Session, post_install: Callable = None) -> None: +def _session_tests( + session: nox.sessions.Session, post_install: Callable = None +) -> None: if TEST_CONFIG["pip_version_override"]: pip_version = TEST_CONFIG["pip_version_override"] session.install(f"pip=={pip_version}") @@ -202,7 +205,7 @@ def _session_tests(session: nox.sessions.Session, post_install: Callable = None) # on travis where slow and flaky tests are excluded. # See http://doc.pytest.org/en/latest/_modules/_pytest/main.html success_codes=[0, 5], - env=get_pytest_env_vars() + env=get_pytest_env_vars(), ) @@ -212,9 +215,9 @@ def py(session: nox.sessions.Session) -> None: if session.python in TESTED_VERSIONS: _session_tests(session) else: - session.skip("SKIPPED: {} tests are disabled for this sample.".format( - session.python - )) + session.skip( + "SKIPPED: {} tests are disabled for this sample.".format(session.python) + ) # @@ -223,7 +226,7 @@ def py(session: nox.sessions.Session) -> None: def _get_repo_root() -> Optional[str]: - """ Returns the root folder of the project. """ + """Returns the root folder of the project.""" # Get root of this repository. Assume we don't have directories nested deeper than 10 items. 
p = Path(os.getcwd()) for i in range(10): diff --git a/samples/snippets/update_cluster_test.py b/samples/snippets/update_cluster_test.py index a721d48f..2ba2f2e7 100644 --- a/samples/snippets/update_cluster_test.py +++ b/samples/snippets/update_cluster_test.py @@ -19,9 +19,6 @@ from google.cloud.dataproc_v1.services.cluster_controller.client import ( ClusterControllerClient, ) -from google.cloud.dataproc_v1.services.cluster_controller import async_client -from google.cloud.dataproc_v1.services import cluster_controller -from google.cloud.dataproc_v1.types.clusters import GetClusterRequest import os import uuid @@ -44,18 +41,9 @@ }, } -# option 3 -"""cluster_client = dataproc.ClusterControllerClient( - client_options={"api_endpoint": "{}-dataproc.googleapis.com:443".format(REGION)} - )""" - @pytest.fixture(autouse=True) -def setup_teardown(): - cluster_client = dataproc.ClusterControllerClient( - client_options={"api_endpoint": "{}-dataproc.googleapis.com:443".format(REGION)} - ) - +def setup_teardown(cluster_client): # Create the cluster. operation = cluster_client.create_cluster( request={"project_id": PROJECT_ID, "region": REGION, "cluster": CLUSTER} @@ -73,20 +61,15 @@ def setup_teardown(): ) -# option 2 -"""@pytest.fixture(autouse=True) +@pytest.fixture def cluster_client(): cluster_client = dataproc.ClusterControllerClient( client_options={"api_endpoint": "{}-dataproc.googleapis.com:443".format(REGION)} ) - return cluster_client""" + return cluster_client -def test_update_cluster(capsys): - # option 1 - cluster_client = dataproc.ClusterControllerClient( - client_options={"api_endpoint": "{}-dataproc.googleapis.com:443".format(REGION)} - ) +def test_update_cluster(capsys, cluster_client: ClusterControllerClient): # Wrapper function for client library function update_cluster.update_cluster(PROJECT_ID, REGION, CLUSTER_NAME, NEW_NUM_INSTANCES) new_num_cluster = cluster_client.get_cluster( From aa93d2923f04b808ffd7ae4eae878897b6806d9a Mon Sep 17 00:00:00 2001 From: Owl Bot Date: Thu, 29 Jul 2021 21:11:11 +0000 Subject: [PATCH 14/15] =?UTF-8?q?=F0=9F=A6=89=20Updates=20from=20OwlBot?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit See https://github.com/googleapis/repo-automation-bots/blob/master/packages/owl-bot/README.md --- samples/snippets/noxfile.py | 41 +++++++++++++++++-------------------- 1 file changed, 19 insertions(+), 22 deletions(-) diff --git a/samples/snippets/noxfile.py b/samples/snippets/noxfile.py index b3c8658a..5ff9e1db 100644 --- a/samples/snippets/noxfile.py +++ b/samples/snippets/noxfile.py @@ -38,15 +38,17 @@ TEST_CONFIG = { # You can opt out from the test for specific Python versions. - "ignored_versions": ["2.7"], + 'ignored_versions': ["2.7"], + # Old samples are opted out of enforcing Python type hints # All new samples should feature them - "enforce_type_hints": False, + 'enforce_type_hints': False, + # An envvar key for determining the project id to use. Change it # to 'BUILD_SPECIFIC_GCLOUD_PROJECT' if you want to opt in using a # build specific Cloud project. You can also use your own string # to use your own Cloud project. - "gcloud_project_env": "GOOGLE_CLOUD_PROJECT", + 'gcloud_project_env': 'GOOGLE_CLOUD_PROJECT', # 'gcloud_project_env': 'BUILD_SPECIFIC_GCLOUD_PROJECT', # If you need to use a specific version of pip, # change pip_version_override to the string representation @@ -54,13 +56,13 @@ "pip_version_override": None, # A dictionary you want to inject into your test. Don't put any # secrets here. 
These values will override predefined values. - "envs": {}, + 'envs': {}, } try: # Ensure we can import noxfile_config in the project's directory. - sys.path.append(".") + sys.path.append('.') from noxfile_config import TEST_CONFIG_OVERRIDE except ImportError as e: print("No user noxfile_config found: detail: {}".format(e)) @@ -75,12 +77,12 @@ def get_pytest_env_vars() -> Dict[str, str]: ret = {} # Override the GCLOUD_PROJECT and the alias. - env_key = TEST_CONFIG["gcloud_project_env"] + env_key = TEST_CONFIG['gcloud_project_env'] # This should error out if not set. - ret["GOOGLE_CLOUD_PROJECT"] = os.environ[env_key] + ret['GOOGLE_CLOUD_PROJECT'] = os.environ[env_key] # Apply user supplied envs. - ret.update(TEST_CONFIG["envs"]) + ret.update(TEST_CONFIG['envs']) return ret @@ -89,7 +91,7 @@ def get_pytest_env_vars() -> Dict[str, str]: ALL_VERSIONS = ["2.7", "3.6", "3.7", "3.8", "3.9"] # Any default versions that should be ignored. -IGNORED_VERSIONS = TEST_CONFIG["ignored_versions"] +IGNORED_VERSIONS = TEST_CONFIG['ignored_versions'] TESTED_VERSIONS = sorted([v for v in ALL_VERSIONS if v not in IGNORED_VERSIONS]) @@ -138,7 +140,7 @@ def _determine_local_import_names(start_dir: str) -> List[str]: @nox.session def lint(session: nox.sessions.Session) -> None: - if not TEST_CONFIG["enforce_type_hints"]: + if not TEST_CONFIG['enforce_type_hints']: session.install("flake8", "flake8-import-order") else: session.install("flake8", "flake8-import-order", "flake8-annotations") @@ -147,11 +149,9 @@ def lint(session: nox.sessions.Session) -> None: args = FLAKE8_COMMON_ARGS + [ "--application-import-names", ",".join(local_names), - ".", + "." ] session.run("flake8", *args) - - # # Black # @@ -164,7 +164,6 @@ def blacken(session: nox.sessions.Session) -> None: session.run("black", *python_files) - # # Sample Tests # @@ -173,9 +172,7 @@ def blacken(session: nox.sessions.Session) -> None: PYTEST_COMMON_ARGS = ["--junitxml=sponge_log.xml"] -def _session_tests( - session: nox.sessions.Session, post_install: Callable = None -) -> None: +def _session_tests(session: nox.sessions.Session, post_install: Callable = None) -> None: if TEST_CONFIG["pip_version_override"]: pip_version = TEST_CONFIG["pip_version_override"] session.install(f"pip=={pip_version}") @@ -205,7 +202,7 @@ def _session_tests( # on travis where slow and flaky tests are excluded. # See http://doc.pytest.org/en/latest/_modules/_pytest/main.html success_codes=[0, 5], - env=get_pytest_env_vars(), + env=get_pytest_env_vars() ) @@ -215,9 +212,9 @@ def py(session: nox.sessions.Session) -> None: if session.python in TESTED_VERSIONS: _session_tests(session) else: - session.skip( - "SKIPPED: {} tests are disabled for this sample.".format(session.python) - ) + session.skip("SKIPPED: {} tests are disabled for this sample.".format( + session.python + )) # @@ -226,7 +223,7 @@ def py(session: nox.sessions.Session) -> None: def _get_repo_root() -> Optional[str]: - """Returns the root folder of the project.""" + """ Returns the root folder of the project. """ # Get root of this repository. Assume we don't have directories nested deeper than 10 items. 
p = Path(os.getcwd()) for i in range(10): From eaacde5600a509c432e9fe899c0b02b324933d64 Mon Sep 17 00:00:00 2001 From: Lo Ferris Date: Mon, 2 Aug 2021 14:02:47 -0700 Subject: [PATCH 15/15] updated static for new_num_instances and fixed linting error --- samples/snippets/update_cluster_test.py | 11 +++++------ 1 file changed, 5 insertions(+), 6 deletions(-) diff --git a/samples/snippets/update_cluster_test.py b/samples/snippets/update_cluster_test.py index 2ba2f2e7..9c608ac4 100644 --- a/samples/snippets/update_cluster_test.py +++ b/samples/snippets/update_cluster_test.py @@ -16,13 +16,12 @@ # client library. -from google.cloud.dataproc_v1.services.cluster_controller.client import ( - ClusterControllerClient, -) import os import uuid -from google.cloud import dataproc_v1 as dataproc +from google.cloud.dataproc_v1.services.cluster_controller.client import ( + ClusterControllerClient, +) import pytest import update_cluster @@ -63,7 +62,7 @@ def setup_teardown(cluster_client): @pytest.fixture def cluster_client(): - cluster_client = dataproc.ClusterControllerClient( + cluster_client = ClusterControllerClient( client_options={"api_endpoint": "{}-dataproc.googleapis.com:443".format(REGION)} ) return cluster_client @@ -78,4 +77,4 @@ def test_update_cluster(capsys, cluster_client: ClusterControllerClient): out, _ = capsys.readouterr() assert CLUSTER_NAME in out - assert new_num_cluster.config.worker_config.num_instances == 5 + assert new_num_cluster.config.worker_config.num_instances == NEW_NUM_INSTANCES
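
Taken together, the fifteen patches leave samples/snippets/update_cluster.py resizing a cluster's worker pool through a get-modify-update round trip: read the cluster, change config.worker_config.num_instances, and send the modified Cluster back together with an update mask naming that one field. For readers skimming the diffs, a condensed sketch of that end state follows. It is not a verbatim copy of the sample: the function name resize_workers is invented for illustration, and the update mask is written in the standard FieldMask shape, a list of paths, rather than the dict-valued mask the patches build, since only the mask's paths matter to the service.

    # Condensed, illustrative sketch of the sample's flow; resize_workers is
    # not a name from the patches above.
    from google.cloud import dataproc_v1 as dataproc


    def resize_workers(project_id, region, cluster_name, new_num_instances):
        # Regional endpoint, matching the sample.
        client = dataproc.ClusterControllerClient(
            client_options={"api_endpoint": f"{region}-dataproc.googleapis.com:443"}
        )
        # Read the current cluster and change only the worker count.
        cluster = client.get_cluster(
            project_id=project_id, region=region, cluster_name=cluster_name
        )
        cluster.config.worker_config.num_instances = new_num_instances
        # The update mask tells the service which field of `cluster` to apply.
        operation = client.update_cluster(
            project_id=project_id,
            region=region,
            cluster_name=cluster_name,
            cluster=cluster,
            update_mask={"paths": ["config.worker_config.num_instances"]},
        )
        # update_cluster returns a long-running operation; block until done.
        return operation.result()

As the final test does, the result can be double-checked with another get_cluster call and an assertion on config.worker_config.num_instances.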
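
The "new test pattern" commit (PATCH 13) is also worth isolating, because the pytest wiring is easy to miss among the noxfile churn: one fixture builds the client, and both the autouse create/delete fixture and the test body request it by name, so all three share a single function-scoped instance. Below is a stripped-down sketch of just that wiring, with the Dataproc calls replaced by an illustrative stand-in class.

    import pytest


    class FakeClient:
        """Illustrative stand-in for ClusterControllerClient."""

        def __init__(self):
            self.calls = []


    @pytest.fixture
    def cluster_client():
        # Created at most once per test; pytest caches the value, so the
        # autouse fixture and the test below receive the same instance.
        return FakeClient()


    @pytest.fixture(autouse=True)
    def setup_teardown(cluster_client):
        cluster_client.calls.append("create_cluster")  # setup before the test
        yield  # the test body runs here
        cluster_client.calls.append("delete_cluster")  # teardown, even on failure


    def test_shares_one_client(cluster_client):
        # Only setup has run at this point; teardown happens after the yield.
        assert cluster_client.calls == ["create_cluster"]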
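
Finally, the repeated OwlBot commits that rewrite samples/snippets/noxfile.py appear to be template synchronization rather than review feedback: the developer's Black runs reformat the file to double quotes, and the bot restores the canonical single-quoted template. The template's extension point is the TEST_CONFIG_OVERRIDE import near the top of the file. A minimal noxfile_config.py that a sample directory could place next to the noxfile might look like the following; the noxfile merges this dict into TEST_CONFIG in template code just below the excerpted hunks, and every value shown here is illustrative rather than taken from this PR.

    # noxfile_config.py, picked up by noxfile.py via
    # `from noxfile_config import TEST_CONFIG_OVERRIDE`.
    TEST_CONFIG_OVERRIDE = {
        # Skip Python versions this sample should not run under.
        "ignored_versions": ["2.7", "3.6"],
        # Opt in to the build-specific Cloud project mentioned in the
        # noxfile's comments.
        "gcloud_project_env": "BUILD_SPECIFIC_GCLOUD_PROJECT",
        # Extra environment variables injected into the pytest session.
        "envs": {"REGION": "us-central1"},
    }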