From 58fa738f3619974f1103a494b06dc7da611973f1 Mon Sep 17 00:00:00 2001
From: Tres Seaver
Date: Wed, 12 Jan 2022 05:52:00 -0500
Subject: [PATCH] ci: run samples under Python 3.9 / 3.10 (#478)
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

* ci: run samples under Python 3.9 / 3.10

Refresh each sample's noxfile via:

----------------------------- %< -----------------------------
$ for noxfile in samples/*/noxfile.py; do
    echo "Refreshing $noxfile";
    wget -O $noxfile https://github.com/GoogleCloudPlatform/python-docs-samples/raw/main/noxfile-template.py
    echo "Blackening samples for $noxfile"
    nox -f $noxfile -s blacken
  done
----------------------------- %< -----------------------------

Closes #477.

* fix: disable install-from-source for beam sample

Per #203.

* fix: skip beam sample for Python 3.10

Beam-related wheels are not yet available.

* fix: also refresh noxfiles for 'samples/snippets'

* ci: don't enforce type hints on old samples

* resolve issue where sample templates are not updated

* 🦉 Updates from OwlBot

See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md

* resolve mypy error "Name '__path__' already defined"

* add workaround from PR #203

Co-authored-by: Anthonios Partheniou
Co-authored-by: Owl Bot
---
 google/__init__.py | 6 +-
 google/cloud/__init__.py | 6 +-
 owlbot.py | 23 +--
 samples/beam/hello_world_write.py | 40 +++--
 samples/beam/hello_world_write_test.py | 19 +-
 samples/beam/noxfile.py | 99 ++++++++---
 samples/beam/noxfile_config.py | 45 +++++
 samples/hello/main.py | 51 +++---
 samples/hello/main_test.py | 23 ++-
 samples/hello/noxfile.py | 104 ++++++++---
 samples/hello_happybase/main.py | 55 +++---
 samples/hello_happybase/main_test.py | 28 ++-
 samples/hello_happybase/noxfile.py | 104 ++++++++---
 samples/instanceadmin/noxfile.py | 104 ++++++++---
 samples/metricscaler/metricscaler.py | 119 +++++++------
 samples/metricscaler/metricscaler_test.py | 92 +++++-----
 samples/metricscaler/noxfile.py | 104 ++++++++---
 samples/quickstart/main.py | 31 ++--
 samples/quickstart/main_test.py | 10 +-
 samples/quickstart/noxfile.py | 104 ++++++++---
 samples/quickstart_happybase/main.py | 29 ++-
 samples/quickstart_happybase/main_test.py | 10 +-
 samples/quickstart_happybase/noxfile.py | 104 ++++++++---
 samples/snippets/filters/filter_snippets.py | 99 +++++++----
 samples/snippets/filters/filters_test.py | 70 +++-----
 samples/snippets/filters/noxfile.py | 104 ++++++++---
 samples/snippets/reads/noxfile.py | 104 ++++++++---
 samples/snippets/reads/read_snippets.py | 36 ++--
 samples/snippets/reads/reads_test.py | 8 +-
 samples/snippets/writes/noxfile.py | 104 ++++++++---
 samples/snippets/writes/write_batch.py | 32 ++--
 .../snippets/writes/write_conditionally.py | 18 +-
 samples/snippets/writes/write_increment.py | 4 +-
 samples/snippets/writes/write_simple.py | 19 +-
 samples/snippets/writes/writes_test.py | 16 +-
 samples/tableadmin/noxfile.py | 104 ++++++++---
 samples/tableadmin/tableadmin.py | 166 ++++++++++--------
 samples/tableadmin/tableadmin_test.py | 44 ++---
 38 files changed, 1395 insertions(+), 843 deletions(-)
 create mode 100644 samples/beam/noxfile_config.py

diff --git a/google/__init__.py b/google/__init__.py
index ced5017a1..a5ba80656 100644
--- a/google/__init__.py
+++ b/google/__init__.py
@@ -1,10 +1,6 @@
-from typing import List
-
 try:
     import pkg_resources
 
     pkg_resources.declare_namespace(__name__)
 except ImportError:
-    import pkgutil
-
-    __path__: List[str] = pkgutil.extend_path(__path__, __name__)
+    pass
diff --git
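Both __init__.py hunks (this one and the identical one for google/cloud just below) settle on the pkg_resources style of namespace declaration. As a minimal sketch, assuming it lives in a namespace package's __init__.py: the annotated rebinding in the dropped fallback is exactly what made mypy report "Name '__path__' already defined".

try:
    import pkg_resources

    pkg_resources.declare_namespace(__name__)  # the style this patch keeps
except ImportError:
    import pkgutil

    # The fallback rebinds the module-level __path__; annotating that
    # rebinding (`__path__: List[str] = ...`) triggered the mypy error,
    # so the patch replaces the whole fallback with `pass`.
    __path__ = pkgutil.extend_path(__path__, __name__)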
a/google/cloud/__init__.py b/google/cloud/__init__.py index ced5017a1..a5ba80656 100644 --- a/google/cloud/__init__.py +++ b/google/cloud/__init__.py @@ -1,10 +1,6 @@ -from typing import List - try: import pkg_resources pkg_resources.declare_namespace(__name__) except ImportError: - import pkgutil - - __path__: List[str] = pkgutil.extend_path(__path__, __name__) + pass diff --git a/owlbot.py b/owlbot.py index ca452ddf3..6ab6579e1 100644 --- a/owlbot.py +++ b/owlbot.py @@ -217,20 +217,15 @@ def lint_setup_py(session): # Samples templates # ---------------------------------------------------------------------------- -sample_files = common.py_samples(samples=True) -for path in sample_files: - s.move(path) - -# Note: python-docs-samples is not yet using 'main': -#s.replace( -# "samples/**/*.md", -# r"python-docs-samples/blob/master/", -# "python-docs-samples/blob/main/", -#) +python.py_samples(skip_readmes=True) + s.replace( - "samples/**/*.md", - r"google-cloud-python/blob/master/", - "google-cloud-python/blob/main/", -) + "samples/beam/noxfile.py", + """INSTALL_LIBRARY_FROM_SOURCE \= os.environ.get\("INSTALL_LIBRARY_FROM_SOURCE", False\) in \( + "True", + "true", +\)""", + """# todo(kolea2): temporary workaround to install pinned dep version +INSTALL_LIBRARY_FROM_SOURCE = False""") s.shell.run(["nox", "-s", "blacken"], hide_output=False) diff --git a/samples/beam/hello_world_write.py b/samples/beam/hello_world_write.py index 894edc46f..89f541d0d 100644 --- a/samples/beam/hello_world_write.py +++ b/samples/beam/hello_world_write.py @@ -23,28 +23,29 @@ class BigtableOptions(PipelineOptions): @classmethod def _add_argparse_args(cls, parser): parser.add_argument( - '--bigtable-project', - help='The Bigtable project ID, this can be different than your ' - 'Dataflow project', - default='bigtable-project') + "--bigtable-project", + help="The Bigtable project ID, this can be different than your " + "Dataflow project", + default="bigtable-project", + ) parser.add_argument( - '--bigtable-instance', - help='The Bigtable instance ID', - default='bigtable-instance') + "--bigtable-instance", + help="The Bigtable instance ID", + default="bigtable-instance", + ) parser.add_argument( - '--bigtable-table', - help='The Bigtable table ID in the instance.', - default='bigtable-table') + "--bigtable-table", + help="The Bigtable table ID in the instance.", + default="bigtable-table", + ) class CreateRowFn(beam.DoFn): def process(self, key): direct_row = row.DirectRow(row_key=key) direct_row.set_cell( - "stats_summary", - b"os_build", - b"android", - datetime.datetime.now()) + "stats_summary", b"os_build", b"android", datetime.datetime.now() + ) return [direct_row] @@ -52,13 +53,14 @@ def run(argv=None): """Build and run the pipeline.""" options = BigtableOptions(argv) with beam.Pipeline(options=options) as p: - p | beam.Create(["phone#4c410523#20190501", - "phone#4c410523#20190502"]) | beam.ParDo( - CreateRowFn()) | WriteToBigTable( + p | beam.Create( + ["phone#4c410523#20190501", "phone#4c410523#20190502"] + ) | beam.ParDo(CreateRowFn()) | WriteToBigTable( project_id=options.bigtable_project, instance_id=options.bigtable_instance, - table_id=options.bigtable_table) + table_id=options.bigtable_table, + ) -if __name__ == '__main__': +if __name__ == "__main__": run() diff --git a/samples/beam/hello_world_write_test.py b/samples/beam/hello_world_write_test.py index cdbecc661..4e9a47c7d 100644 --- a/samples/beam/hello_world_write_test.py +++ b/samples/beam/hello_world_write_test.py @@ -19,9 +19,9 @@ import 
hello_world_write -PROJECT = os.environ['GOOGLE_CLOUD_PROJECT'] -BIGTABLE_INSTANCE = os.environ['BIGTABLE_INSTANCE'] -TABLE_ID_PREFIX = 'mobile-time-series-{}' +PROJECT = os.environ["GOOGLE_CLOUD_PROJECT"] +BIGTABLE_INSTANCE = os.environ["BIGTABLE_INSTANCE"] +TABLE_ID_PREFIX = "mobile-time-series-{}" @pytest.fixture(scope="module", autouse=True) @@ -34,17 +34,20 @@ def table_id(): if table.exists(): table.delete() - table.create(column_families={'stats_summary': None}) + table.create(column_families={"stats_summary": None}) yield table_id table.delete() def test_hello_world_write(table_id): - hello_world_write.run([ - '--bigtable-project=%s' % PROJECT, - '--bigtable-instance=%s' % BIGTABLE_INSTANCE, - '--bigtable-table=%s' % table_id]) + hello_world_write.run( + [ + "--bigtable-project=%s" % PROJECT, + "--bigtable-instance=%s" % BIGTABLE_INSTANCE, + "--bigtable-table=%s" % table_id, + ] + ) client = bigtable.Client(project=PROJECT, admin=True) instance = client.instance(BIGTABLE_INSTANCE) diff --git a/samples/beam/noxfile.py b/samples/beam/noxfile.py index 171bee657..d7567dee9 100644 --- a/samples/beam/noxfile.py +++ b/samples/beam/noxfile.py @@ -17,6 +17,7 @@ import os from pathlib import Path import sys +from typing import Callable, Dict, List, Optional import nox @@ -27,8 +28,9 @@ # WARNING - WARNING - WARNING - WARNING - WARNING # WARNING - WARNING - WARNING - WARNING - WARNING -# Copy `noxfile_config.py` to your directory and modify it instead. +BLACK_VERSION = "black==19.10b0" +# Copy `noxfile_config.py` to your directory and modify it instead. # `TEST_CONFIG` dict is a configuration hook that allows users to # modify the test configurations. The values here should be in sync @@ -37,24 +39,29 @@ TEST_CONFIG = { # You can opt out from the test for specific Python versions. - 'ignored_versions': ["2.7"], - + "ignored_versions": [], + # Old samples are opted out of enforcing Python type hints + # All new samples should feature them + "enforce_type_hints": False, # An envvar key for determining the project id to use. Change it # to 'BUILD_SPECIFIC_GCLOUD_PROJECT' if you want to opt in using a # build specific Cloud project. You can also use your own string # to use your own Cloud project. - 'gcloud_project_env': 'GOOGLE_CLOUD_PROJECT', + "gcloud_project_env": "GOOGLE_CLOUD_PROJECT", # 'gcloud_project_env': 'BUILD_SPECIFIC_GCLOUD_PROJECT', - + # If you need to use a specific version of pip, + # change pip_version_override to the string representation + # of the version number, for example, "20.2.4" + "pip_version_override": None, # A dictionary you want to inject into your test. Don't put any # secrets here. These values will override predefined values. - 'envs': {}, + "envs": {}, } try: # Ensure we can import noxfile_config in the project's directory. - sys.path.append('.') + sys.path.append(".") from noxfile_config import TEST_CONFIG_OVERRIDE except ImportError as e: print("No user noxfile_config found: detail: {}".format(e)) @@ -64,37 +71,41 @@ TEST_CONFIG.update(TEST_CONFIG_OVERRIDE) -def get_pytest_env_vars(): +def get_pytest_env_vars() -> Dict[str, str]: """Returns a dict for pytest invocation.""" ret = {} # Override the GCLOUD_PROJECT and the alias. - env_key = TEST_CONFIG['gcloud_project_env'] + env_key = TEST_CONFIG["gcloud_project_env"] # This should error out if not set. - ret['GOOGLE_CLOUD_PROJECT'] = os.environ[env_key] + ret["GOOGLE_CLOUD_PROJECT"] = os.environ[env_key] # Apply user supplied envs. 
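A toy illustration, with hypothetical values, of the TEST_CONFIG.update(TEST_CONFIG_OVERRIDE) merge shown above: dict.update() replaces matching top-level keys wholesale, which is why a sample-local noxfile_config.py only needs to restate the keys it changes.

TEST_CONFIG = {"ignored_versions": [], "envs": {}}
TEST_CONFIG_OVERRIDE = {"ignored_versions": ["2.7", "3.10"]}

TEST_CONFIG.update(TEST_CONFIG_OVERRIDE)  # the override wins, key by key

assert TEST_CONFIG["ignored_versions"] == ["2.7", "3.10"]
assert TEST_CONFIG["envs"] == {}  # unstated keys keep their defaults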
- ret.update(TEST_CONFIG['envs']) + ret.update(TEST_CONFIG["envs"]) return ret # DO NOT EDIT - automatically generated. -# All versions used to tested samples. -ALL_VERSIONS = ["2.7", "3.6", "3.7", "3.8"] +# All versions used to test samples. +ALL_VERSIONS = ["3.6", "3.7", "3.8", "3.9", "3.10"] # Any default versions that should be ignored. -IGNORED_VERSIONS = TEST_CONFIG['ignored_versions'] +IGNORED_VERSIONS = TEST_CONFIG["ignored_versions"] TESTED_VERSIONS = sorted([v for v in ALL_VERSIONS if v not in IGNORED_VERSIONS]) # todo(kolea2): temporary workaround to install pinned dep version INSTALL_LIBRARY_FROM_SOURCE = False + +# Error if a python version is missing +nox.options.error_on_missing_interpreters = True + # # Style Checks # -def _determine_local_import_names(start_dir): +def _determine_local_import_names(start_dir: str) -> List[str]: """Determines all import names that should be considered "local". This is used when running the linter to insure that import order is @@ -132,18 +143,34 @@ def _determine_local_import_names(start_dir): @nox.session -def lint(session): - session.install("flake8", "flake8-import-order") +def lint(session: nox.sessions.Session) -> None: + if not TEST_CONFIG["enforce_type_hints"]: + session.install("flake8", "flake8-import-order") + else: + session.install("flake8", "flake8-import-order", "flake8-annotations") local_names = _determine_local_import_names(".") args = FLAKE8_COMMON_ARGS + [ "--application-import-names", ",".join(local_names), - "." + ".", ] session.run("flake8", *args) +# +# Black +# + + +@nox.session +def blacken(session: nox.sessions.Session) -> None: + session.install(BLACK_VERSION) + python_files = [path for path in os.listdir(".") if path.endswith(".py")] + + session.run("black", *python_files) + + # # Sample Tests # @@ -152,13 +179,24 @@ def lint(session): PYTEST_COMMON_ARGS = ["--junitxml=sponge_log.xml"] -def _session_tests(session, post_install=None): +def _session_tests( + session: nox.sessions.Session, post_install: Callable = None +) -> None: + if TEST_CONFIG["pip_version_override"]: + pip_version = TEST_CONFIG["pip_version_override"] + session.install(f"pip=={pip_version}") """Runs py.test for a particular project.""" if os.path.exists("requirements.txt"): - session.install("-r", "requirements.txt") + if os.path.exists("constraints.txt"): + session.install("-r", "requirements.txt", "-c", "constraints.txt") + else: + session.install("-r", "requirements.txt") if os.path.exists("requirements-test.txt"): - session.install("-r", "requirements-test.txt") + if os.path.exists("constraints-test.txt"): + session.install("-r", "requirements-test.txt", "-c", "constraints-test.txt") + else: + session.install("-r", "requirements-test.txt") if INSTALL_LIBRARY_FROM_SOURCE: session.install("-e", _get_repo_root()) @@ -173,19 +211,19 @@ def _session_tests(session, post_install=None): # on travis where slow and flaky tests are excluded. 
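Stepping back from the noxfile internals to the Beam sample itself: a hypothetical local run mirrors the test shown earlier. The three --bigtable-* flags are parsed by BigtableOptions; the project, instance, and table IDs below are placeholders, not values from this repo.

import hello_world_write

hello_world_write.run(
    [
        "--bigtable-project=my-project",
        "--bigtable-instance=my-instance",
        "--bigtable-table=mobile-time-series-00",
    ]
)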
# See http://doc.pytest.org/en/latest/_modules/_pytest/main.html success_codes=[0, 5], - env=get_pytest_env_vars() + env=get_pytest_env_vars(), ) @nox.session(python=ALL_VERSIONS) -def py(session): +def py(session: nox.sessions.Session) -> None: """Runs py.test for a sample using the specified version of Python.""" if session.python in TESTED_VERSIONS: _session_tests(session) else: - session.skip("SKIPPED: {} tests are disabled for this sample.".format( - session.python - )) + session.skip( + "SKIPPED: {} tests are disabled for this sample.".format(session.python) + ) # @@ -193,7 +231,7 @@ def py(session): # -def _get_repo_root(): +def _get_repo_root() -> Optional[str]: """ Returns the root folder of the project. """ # Get root of this repository. Assume we don't have directories nested deeper than 10 items. p = Path(os.getcwd()) @@ -202,6 +240,11 @@ def _get_repo_root(): break if Path(p / ".git").exists(): return str(p) + # .git is not available in repos cloned via Cloud Build + # setup.py is always in the library's root, so use that instead + # https://github.com/googleapis/synthtool/issues/792 + if Path(p / "setup.py").exists(): + return str(p) p = p.parent raise Exception("Unable to detect repository root.") @@ -211,7 +254,7 @@ def _get_repo_root(): @nox.session @nox.parametrize("path", GENERATED_READMES) -def readmegen(session, path): +def readmegen(session: nox.sessions.Session, path: str) -> None: """(Re-)generates the readme for a sample.""" session.install("jinja2", "pyyaml") dir_ = os.path.dirname(path) diff --git a/samples/beam/noxfile_config.py b/samples/beam/noxfile_config.py new file mode 100644 index 000000000..eb01435a0 --- /dev/null +++ b/samples/beam/noxfile_config.py @@ -0,0 +1,45 @@ +# Copyright 2021 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# Default TEST_CONFIG_OVERRIDE for python repos. + +# You can copy this file into your directory, then it will be imported from +# the noxfile.py. + +# The source of truth: +# https://github.com/GoogleCloudPlatform/python-docs-samples/blob/main/noxfile_config.py + +TEST_CONFIG_OVERRIDE = { + # You can opt out from the test for specific Python versions. + "ignored_versions": [ + "2.7", # not supported + "3.10", # Beam wheels not yet released for Python 3.10 + ], + # Old samples are opted out of enforcing Python type hints + # All new samples should feature them + "enforce_type_hints": False, + # An envvar key for determining the project id to use. Change it + # to 'BUILD_SPECIFIC_GCLOUD_PROJECT' if you want to opt in using a + # build specific Cloud project. You can also use your own string + # to use your own Cloud project. + "gcloud_project_env": "GOOGLE_CLOUD_PROJECT", + # 'gcloud_project_env': 'BUILD_SPECIFIC_GCLOUD_PROJECT', + # If you need to use a specific version of pip, + # change pip_version_override to the string representation + # of the version number, for example, "20.2.4" + "pip_version_override": None, + # A dictionary you want to inject into your test. Don't put any + # secrets here. 
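The _get_repo_root() change above adds a setup.py fallback because repos cloned via Cloud Build have no .git directory. A condensed, standalone sketch of that walk (the function name here is mine, not the template's):

from pathlib import Path


def find_repo_root(start: Path, max_hops: int = 10) -> Path:
    p = start
    for _ in range(max_hops):
        # Accept a .git directory, or setup.py for .git-less Cloud Build clones.
        if (p / ".git").exists() or (p / "setup.py").exists():
            return p
        p = p.parent
    raise RuntimeError("Unable to detect repository root.")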
These values will override predefined values. + "envs": {}, +} diff --git a/samples/hello/main.py b/samples/hello/main.py index 073270847..7b2b1764a 100644 --- a/samples/hello/main.py +++ b/samples/hello/main.py @@ -25,12 +25,14 @@ """ import argparse + # [START bigtable_hw_imports] import datetime from google.cloud import bigtable from google.cloud.bigtable import column_family from google.cloud.bigtable import row_filters + # [END bigtable_hw_imports] @@ -43,14 +45,14 @@ def main(project_id, instance_id, table_id): # [END bigtable_hw_connect] # [START bigtable_hw_create_table] - print('Creating the {} table.'.format(table_id)) + print("Creating the {} table.".format(table_id)) table = instance.table(table_id) - print('Creating column family cf1 with Max Version GC rule...') + print("Creating column family cf1 with Max Version GC rule...") # Create a column family with GC policy : most recent N versions # Define the GC policy to retain only the most recent 2 versions max_versions_rule = column_family.MaxVersionsGCRule(2) - column_family_id = 'cf1' + column_family_id = "cf1" column_families = {column_family_id: max_versions_rule} if not table.exists(): table.create(column_families=column_families) @@ -59,10 +61,10 @@ def main(project_id, instance_id, table_id): # [END bigtable_hw_create_table] # [START bigtable_hw_write_rows] - print('Writing some greetings to the table.') - greetings = ['Hello World!', 'Hello Cloud Bigtable!', 'Hello Python!'] + print("Writing some greetings to the table.") + greetings = ["Hello World!", "Hello Cloud Bigtable!", "Hello Python!"] rows = [] - column = 'greeting'.encode() + column = "greeting".encode() for i, value in enumerate(greetings): # Note: This example uses sequential numeric IDs for simplicity, # but this can result in poor performance in a production @@ -74,12 +76,11 @@ def main(project_id, instance_id, table_id): # the best performance, see the documentation: # # https://cloud.google.com/bigtable/docs/schema-design - row_key = 'greeting{}'.format(i).encode() + row_key = "greeting{}".format(i).encode() row = table.direct_row(row_key) - row.set_cell(column_family_id, - column, - value, - timestamp=datetime.datetime.utcnow()) + row.set_cell( + column_family_id, column, value, timestamp=datetime.datetime.utcnow() + ) rows.append(row) table.mutate_rows(rows) # [END bigtable_hw_write_rows] @@ -91,40 +92,40 @@ def main(project_id, instance_id, table_id): # [END bigtable_hw_create_filter] # [START bigtable_hw_get_with_filter] - print('Getting a single greeting by row key.') - key = 'greeting0'.encode() + print("Getting a single greeting by row key.") + key = "greeting0".encode() row = table.read_row(key, row_filter) cell = row.cells[column_family_id][column][0] - print(cell.value.decode('utf-8')) + print(cell.value.decode("utf-8")) # [END bigtable_hw_get_with_filter] # [START bigtable_hw_scan_with_filter] - print('Scanning for all greetings:') + print("Scanning for all greetings:") partial_rows = table.read_rows(filter_=row_filter) for row in partial_rows: cell = row.cells[column_family_id][column][0] - print(cell.value.decode('utf-8')) + print(cell.value.decode("utf-8")) # [END bigtable_hw_scan_with_filter] # [START bigtable_hw_delete_table] - print('Deleting the {} table.'.format(table_id)) + print("Deleting the {} table.".format(table_id)) table.delete() # [END bigtable_hw_delete_table] -if __name__ == '__main__': +if __name__ == "__main__": parser = argparse.ArgumentParser( - description=__doc__, - 
formatter_class=argparse.ArgumentDefaultsHelpFormatter) - parser.add_argument('project_id', help='Your Cloud Platform project ID.') + description=__doc__, formatter_class=argparse.ArgumentDefaultsHelpFormatter + ) + parser.add_argument("project_id", help="Your Cloud Platform project ID.") parser.add_argument( - 'instance_id', help='ID of the Cloud Bigtable instance to connect to.') + "instance_id", help="ID of the Cloud Bigtable instance to connect to." + ) parser.add_argument( - '--table', - help='Table to create and destroy.', - default='Hello-Bigtable') + "--table", help="Table to create and destroy.", default="Hello-Bigtable" + ) args = parser.parse_args() main(args.project_id, args.instance_id, args.table) diff --git a/samples/hello/main_test.py b/samples/hello/main_test.py index 49b8098fc..641b34d11 100644 --- a/samples/hello/main_test.py +++ b/samples/hello/main_test.py @@ -17,23 +17,22 @@ from main import main -PROJECT = os.environ['GOOGLE_CLOUD_PROJECT'] -BIGTABLE_INSTANCE = os.environ['BIGTABLE_INSTANCE'] -TABLE_NAME_FORMAT = 'hello-world-test-{}' +PROJECT = os.environ["GOOGLE_CLOUD_PROJECT"] +BIGTABLE_INSTANCE = os.environ["BIGTABLE_INSTANCE"] +TABLE_NAME_FORMAT = "hello-world-test-{}" TABLE_NAME_RANGE = 10000 def test_main(capsys): - table_name = TABLE_NAME_FORMAT.format( - random.randrange(TABLE_NAME_RANGE)) + table_name = TABLE_NAME_FORMAT.format(random.randrange(TABLE_NAME_RANGE)) main(PROJECT, BIGTABLE_INSTANCE, table_name) out, _ = capsys.readouterr() - assert 'Creating the {} table.'.format(table_name) in out - assert 'Writing some greetings to the table.' in out - assert 'Getting a single greeting by row key.' in out - assert 'Hello World!' in out - assert 'Scanning for all greetings' in out - assert 'Hello Cloud Bigtable!' in out - assert 'Deleting the {} table.'.format(table_name) in out + assert "Creating the {} table.".format(table_name) in out + assert "Writing some greetings to the table." in out + assert "Getting a single greeting by row key." in out + assert "Hello World!" in out + assert "Scanning for all greetings" in out + assert "Hello Cloud Bigtable!" in out + assert "Deleting the {} table.".format(table_name) in out diff --git a/samples/hello/noxfile.py b/samples/hello/noxfile.py index ba55d7ce5..93a9122cc 100644 --- a/samples/hello/noxfile.py +++ b/samples/hello/noxfile.py @@ -17,6 +17,7 @@ import os from pathlib import Path import sys +from typing import Callable, Dict, List, Optional import nox @@ -27,8 +28,9 @@ # WARNING - WARNING - WARNING - WARNING - WARNING # WARNING - WARNING - WARNING - WARNING - WARNING -# Copy `noxfile_config.py` to your directory and modify it instead. +BLACK_VERSION = "black==19.10b0" +# Copy `noxfile_config.py` to your directory and modify it instead. # `TEST_CONFIG` dict is a configuration hook that allows users to # modify the test configurations. The values here should be in sync @@ -37,24 +39,29 @@ TEST_CONFIG = { # You can opt out from the test for specific Python versions. - 'ignored_versions': ["2.7"], - + "ignored_versions": [], + # Old samples are opted out of enforcing Python type hints + # All new samples should feature them + "enforce_type_hints": False, # An envvar key for determining the project id to use. Change it # to 'BUILD_SPECIFIC_GCLOUD_PROJECT' if you want to opt in using a # build specific Cloud project. You can also use your own string # to use your own Cloud project. 
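Returning briefly to the row-key caveat in samples/hello/main.py above: one common alternative to sequential keys (illustrative only, not part of the sample) is field promotion, the pattern the Beam sample's "phone#4c410523#20190501" keys already use, so that sequential writes spread across tablets.

def promoted_row_key(device_id: str, day: str) -> bytes:
    # e.g. b"phone#4c410523#20190501" rather than b"greeting0", b"greeting1", ...
    return "{}#{}".format(device_id, day).encode()


assert promoted_row_key("phone#4c410523", "20190501") == b"phone#4c410523#20190501"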
- 'gcloud_project_env': 'GOOGLE_CLOUD_PROJECT', + "gcloud_project_env": "GOOGLE_CLOUD_PROJECT", # 'gcloud_project_env': 'BUILD_SPECIFIC_GCLOUD_PROJECT', - + # If you need to use a specific version of pip, + # change pip_version_override to the string representation + # of the version number, for example, "20.2.4" + "pip_version_override": None, # A dictionary you want to inject into your test. Don't put any # secrets here. These values will override predefined values. - 'envs': {}, + "envs": {}, } try: # Ensure we can import noxfile_config in the project's directory. - sys.path.append('.') + sys.path.append(".") from noxfile_config import TEST_CONFIG_OVERRIDE except ImportError as e: print("No user noxfile_config found: detail: {}".format(e)) @@ -64,36 +71,43 @@ TEST_CONFIG.update(TEST_CONFIG_OVERRIDE) -def get_pytest_env_vars(): +def get_pytest_env_vars() -> Dict[str, str]: """Returns a dict for pytest invocation.""" ret = {} # Override the GCLOUD_PROJECT and the alias. - env_key = TEST_CONFIG['gcloud_project_env'] + env_key = TEST_CONFIG["gcloud_project_env"] # This should error out if not set. - ret['GOOGLE_CLOUD_PROJECT'] = os.environ[env_key] + ret["GOOGLE_CLOUD_PROJECT"] = os.environ[env_key] # Apply user supplied envs. - ret.update(TEST_CONFIG['envs']) + ret.update(TEST_CONFIG["envs"]) return ret # DO NOT EDIT - automatically generated. -# All versions used to tested samples. -ALL_VERSIONS = ["2.7", "3.6", "3.7", "3.8"] +# All versions used to test samples. +ALL_VERSIONS = ["3.6", "3.7", "3.8", "3.9", "3.10"] # Any default versions that should be ignored. -IGNORED_VERSIONS = TEST_CONFIG['ignored_versions'] +IGNORED_VERSIONS = TEST_CONFIG["ignored_versions"] TESTED_VERSIONS = sorted([v for v in ALL_VERSIONS if v not in IGNORED_VERSIONS]) -INSTALL_LIBRARY_FROM_SOURCE = bool(os.environ.get("INSTALL_LIBRARY_FROM_SOURCE", False)) +INSTALL_LIBRARY_FROM_SOURCE = os.environ.get("INSTALL_LIBRARY_FROM_SOURCE", False) in ( + "True", + "true", +) + +# Error if a python version is missing +nox.options.error_on_missing_interpreters = True + # # Style Checks # -def _determine_local_import_names(start_dir): +def _determine_local_import_names(start_dir: str) -> List[str]: """Determines all import names that should be considered "local". This is used when running the linter to insure that import order is @@ -131,18 +145,34 @@ def _determine_local_import_names(start_dir): @nox.session -def lint(session): - session.install("flake8", "flake8-import-order") +def lint(session: nox.sessions.Session) -> None: + if not TEST_CONFIG["enforce_type_hints"]: + session.install("flake8", "flake8-import-order") + else: + session.install("flake8", "flake8-import-order", "flake8-annotations") local_names = _determine_local_import_names(".") args = FLAKE8_COMMON_ARGS + [ "--application-import-names", ",".join(local_names), - "." 
+ ".", ] session.run("flake8", *args) +# +# Black +# + + +@nox.session +def blacken(session: nox.sessions.Session) -> None: + session.install(BLACK_VERSION) + python_files = [path for path in os.listdir(".") if path.endswith(".py")] + + session.run("black", *python_files) + + # # Sample Tests # @@ -151,13 +181,24 @@ def lint(session): PYTEST_COMMON_ARGS = ["--junitxml=sponge_log.xml"] -def _session_tests(session, post_install=None): +def _session_tests( + session: nox.sessions.Session, post_install: Callable = None +) -> None: + if TEST_CONFIG["pip_version_override"]: + pip_version = TEST_CONFIG["pip_version_override"] + session.install(f"pip=={pip_version}") """Runs py.test for a particular project.""" if os.path.exists("requirements.txt"): - session.install("-r", "requirements.txt") + if os.path.exists("constraints.txt"): + session.install("-r", "requirements.txt", "-c", "constraints.txt") + else: + session.install("-r", "requirements.txt") if os.path.exists("requirements-test.txt"): - session.install("-r", "requirements-test.txt") + if os.path.exists("constraints-test.txt"): + session.install("-r", "requirements-test.txt", "-c", "constraints-test.txt") + else: + session.install("-r", "requirements-test.txt") if INSTALL_LIBRARY_FROM_SOURCE: session.install("-e", _get_repo_root()) @@ -172,19 +213,19 @@ def _session_tests(session, post_install=None): # on travis where slow and flaky tests are excluded. # See http://doc.pytest.org/en/latest/_modules/_pytest/main.html success_codes=[0, 5], - env=get_pytest_env_vars() + env=get_pytest_env_vars(), ) @nox.session(python=ALL_VERSIONS) -def py(session): +def py(session: nox.sessions.Session) -> None: """Runs py.test for a sample using the specified version of Python.""" if session.python in TESTED_VERSIONS: _session_tests(session) else: - session.skip("SKIPPED: {} tests are disabled for this sample.".format( - session.python - )) + session.skip( + "SKIPPED: {} tests are disabled for this sample.".format(session.python) + ) # @@ -192,7 +233,7 @@ def py(session): # -def _get_repo_root(): +def _get_repo_root() -> Optional[str]: """ Returns the root folder of the project. """ # Get root of this repository. Assume we don't have directories nested deeper than 10 items. 
p = Path(os.getcwd()) @@ -201,6 +242,11 @@ def _get_repo_root(): break if Path(p / ".git").exists(): return str(p) + # .git is not available in repos cloned via Cloud Build + # setup.py is always in the library's root, so use that instead + # https://github.com/googleapis/synthtool/issues/792 + if Path(p / "setup.py").exists(): + return str(p) p = p.parent raise Exception("Unable to detect repository root.") @@ -210,7 +256,7 @@ def _get_repo_root(): @nox.session @nox.parametrize("path", GENERATED_READMES) -def readmegen(session, path): +def readmegen(session: nox.sessions.Session, path: str) -> None: """(Re-)generates the readme for a sample.""" session.install("jinja2", "pyyaml") dir_ = os.path.dirname(path) diff --git a/samples/hello_happybase/main.py b/samples/hello_happybase/main.py index ade4acbf0..7999fd006 100644 --- a/samples/hello_happybase/main.py +++ b/samples/hello_happybase/main.py @@ -29,6 +29,7 @@ # [START bigtable_hw_imports_happybase] from google.cloud import bigtable from google.cloud import happybase + # [END bigtable_hw_imports_happybase] @@ -43,23 +44,21 @@ def main(project_id, instance_id, table_name): try: # [START bigtable_hw_create_table_happybase] - print('Creating the {} table.'.format(table_name)) - column_family_name = 'cf1' + print("Creating the {} table.".format(table_name)) + column_family_name = "cf1" connection.create_table( - table_name, - { - column_family_name: dict() # Use default options. - }) + table_name, {column_family_name: dict()} # Use default options. + ) # [END bigtable_hw_create_table_happybase] # [START bigtable_hw_write_rows_happybase] - print('Writing some greetings to the table.') + print("Writing some greetings to the table.") table = connection.table(table_name) - column_name = '{fam}:greeting'.format(fam=column_family_name) + column_name = "{fam}:greeting".format(fam=column_family_name) greetings = [ - 'Hello World!', - 'Hello Cloud Bigtable!', - 'Hello HappyBase!', + "Hello World!", + "Hello Cloud Bigtable!", + "Hello HappyBase!", ] for i, value in enumerate(greetings): @@ -73,28 +72,26 @@ def main(project_id, instance_id, table_name): # the best performance, see the documentation: # # https://cloud.google.com/bigtable/docs/schema-design - row_key = 'greeting{}'.format(i) - table.put( - row_key, {column_name.encode('utf-8'): value.encode('utf-8')} - ) + row_key = "greeting{}".format(i) + table.put(row_key, {column_name.encode("utf-8"): value.encode("utf-8")}) # [END bigtable_hw_write_rows_happybase] # [START bigtable_hw_get_by_key_happybase] - print('Getting a single greeting by row key.') - key = 'greeting0'.encode('utf-8') + print("Getting a single greeting by row key.") + key = "greeting0".encode("utf-8") row = table.row(key) - print('\t{}: {}'.format(key, row[column_name.encode('utf-8')])) + print("\t{}: {}".format(key, row[column_name.encode("utf-8")])) # [END bigtable_hw_get_by_key_happybase] # [START bigtable_hw_scan_all_happybase] - print('Scanning for all greetings:') + print("Scanning for all greetings:") for key, row in table.scan(): - print('\t{}: {}'.format(key, row[column_name.encode('utf-8')])) + print("\t{}: {}".format(key, row[column_name.encode("utf-8")])) # [END bigtable_hw_scan_all_happybase] # [START bigtable_hw_delete_table_happybase] - print('Deleting the {} table.'.format(table_name)) + print("Deleting the {} table.".format(table_name)) connection.delete_table(table_name) # [END bigtable_hw_delete_table_happybase] @@ -102,17 +99,17 @@ def main(project_id, instance_id, table_name): connection.close() -if 
__name__ == '__main__': +if __name__ == "__main__": parser = argparse.ArgumentParser( - description=__doc__, - formatter_class=argparse.ArgumentDefaultsHelpFormatter) - parser.add_argument('project_id', help='Your Cloud Platform project ID.') + description=__doc__, formatter_class=argparse.ArgumentDefaultsHelpFormatter + ) + parser.add_argument("project_id", help="Your Cloud Platform project ID.") parser.add_argument( - 'instance_id', help='ID of the Cloud Bigtable instance to connect to.') + "instance_id", help="ID of the Cloud Bigtable instance to connect to." + ) parser.add_argument( - '--table', - help='Table to create and destroy.', - default='Hello-Bigtable') + "--table", help="Table to create and destroy.", default="Hello-Bigtable" + ) args = parser.parse_args() main(args.project_id, args.instance_id, args.table) diff --git a/samples/hello_happybase/main_test.py b/samples/hello_happybase/main_test.py index f72fc0b2e..6a63750da 100644 --- a/samples/hello_happybase/main_test.py +++ b/samples/hello_happybase/main_test.py @@ -17,25 +17,21 @@ from main import main -PROJECT = os.environ['GOOGLE_CLOUD_PROJECT'] -BIGTABLE_INSTANCE = os.environ['BIGTABLE_INSTANCE'] -TABLE_NAME_FORMAT = 'hello-world-hb-test-{}' +PROJECT = os.environ["GOOGLE_CLOUD_PROJECT"] +BIGTABLE_INSTANCE = os.environ["BIGTABLE_INSTANCE"] +TABLE_NAME_FORMAT = "hello-world-hb-test-{}" TABLE_NAME_RANGE = 10000 def test_main(capsys): - table_name = TABLE_NAME_FORMAT.format( - random.randrange(TABLE_NAME_RANGE)) - main( - PROJECT, - BIGTABLE_INSTANCE, - table_name) + table_name = TABLE_NAME_FORMAT.format(random.randrange(TABLE_NAME_RANGE)) + main(PROJECT, BIGTABLE_INSTANCE, table_name) out, _ = capsys.readouterr() - assert 'Creating the {} table.'.format(table_name) in out - assert 'Writing some greetings to the table.' in out - assert 'Getting a single greeting by row key.' in out - assert 'Hello World!' in out - assert 'Scanning for all greetings' in out - assert 'Hello Cloud Bigtable!' in out - assert 'Deleting the {} table.'.format(table_name) in out + assert "Creating the {} table.".format(table_name) in out + assert "Writing some greetings to the table." in out + assert "Getting a single greeting by row key." in out + assert "Hello World!" in out + assert "Scanning for all greetings" in out + assert "Hello Cloud Bigtable!" in out + assert "Deleting the {} table.".format(table_name) in out diff --git a/samples/hello_happybase/noxfile.py b/samples/hello_happybase/noxfile.py index ba55d7ce5..93a9122cc 100644 --- a/samples/hello_happybase/noxfile.py +++ b/samples/hello_happybase/noxfile.py @@ -17,6 +17,7 @@ import os from pathlib import Path import sys +from typing import Callable, Dict, List, Optional import nox @@ -27,8 +28,9 @@ # WARNING - WARNING - WARNING - WARNING - WARNING # WARNING - WARNING - WARNING - WARNING - WARNING -# Copy `noxfile_config.py` to your directory and modify it instead. +BLACK_VERSION = "black==19.10b0" +# Copy `noxfile_config.py` to your directory and modify it instead. # `TEST_CONFIG` dict is a configuration hook that allows users to # modify the test configurations. The values here should be in sync @@ -37,24 +39,29 @@ TEST_CONFIG = { # You can opt out from the test for specific Python versions. - 'ignored_versions': ["2.7"], - + "ignored_versions": [], + # Old samples are opted out of enforcing Python type hints + # All new samples should feature them + "enforce_type_hints": False, # An envvar key for determining the project id to use. 
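For readers skimming the hello_happybase diff above, the HappyBase flow it reformats reduces to roughly the following. This is a sketch with placeholder project, instance, and table IDs, and it assumes the table and its cf1 family already exist; close() matters because the connection is not used as a context manager.

from google.cloud import bigtable
from google.cloud import happybase

client = bigtable.Client(project="my-project", admin=True)
instance = client.instance("my-instance")
connection = happybase.Connection(instance=instance)
try:
    table = connection.table("Hello-Bigtable")
    table.put("greeting0", {b"cf1:greeting": b"Hello World!"})
    print(table.row(b"greeting0"))
finally:
    connection.close()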
Change it # to 'BUILD_SPECIFIC_GCLOUD_PROJECT' if you want to opt in using a # build specific Cloud project. You can also use your own string # to use your own Cloud project. - 'gcloud_project_env': 'GOOGLE_CLOUD_PROJECT', + "gcloud_project_env": "GOOGLE_CLOUD_PROJECT", # 'gcloud_project_env': 'BUILD_SPECIFIC_GCLOUD_PROJECT', - + # If you need to use a specific version of pip, + # change pip_version_override to the string representation + # of the version number, for example, "20.2.4" + "pip_version_override": None, # A dictionary you want to inject into your test. Don't put any # secrets here. These values will override predefined values. - 'envs': {}, + "envs": {}, } try: # Ensure we can import noxfile_config in the project's directory. - sys.path.append('.') + sys.path.append(".") from noxfile_config import TEST_CONFIG_OVERRIDE except ImportError as e: print("No user noxfile_config found: detail: {}".format(e)) @@ -64,36 +71,43 @@ TEST_CONFIG.update(TEST_CONFIG_OVERRIDE) -def get_pytest_env_vars(): +def get_pytest_env_vars() -> Dict[str, str]: """Returns a dict for pytest invocation.""" ret = {} # Override the GCLOUD_PROJECT and the alias. - env_key = TEST_CONFIG['gcloud_project_env'] + env_key = TEST_CONFIG["gcloud_project_env"] # This should error out if not set. - ret['GOOGLE_CLOUD_PROJECT'] = os.environ[env_key] + ret["GOOGLE_CLOUD_PROJECT"] = os.environ[env_key] # Apply user supplied envs. - ret.update(TEST_CONFIG['envs']) + ret.update(TEST_CONFIG["envs"]) return ret # DO NOT EDIT - automatically generated. -# All versions used to tested samples. -ALL_VERSIONS = ["2.7", "3.6", "3.7", "3.8"] +# All versions used to test samples. +ALL_VERSIONS = ["3.6", "3.7", "3.8", "3.9", "3.10"] # Any default versions that should be ignored. -IGNORED_VERSIONS = TEST_CONFIG['ignored_versions'] +IGNORED_VERSIONS = TEST_CONFIG["ignored_versions"] TESTED_VERSIONS = sorted([v for v in ALL_VERSIONS if v not in IGNORED_VERSIONS]) -INSTALL_LIBRARY_FROM_SOURCE = bool(os.environ.get("INSTALL_LIBRARY_FROM_SOURCE", False)) +INSTALL_LIBRARY_FROM_SOURCE = os.environ.get("INSTALL_LIBRARY_FROM_SOURCE", False) in ( + "True", + "true", +) + +# Error if a python version is missing +nox.options.error_on_missing_interpreters = True + # # Style Checks # -def _determine_local_import_names(start_dir): +def _determine_local_import_names(start_dir: str) -> List[str]: """Determines all import names that should be considered "local". This is used when running the linter to insure that import order is @@ -131,18 +145,34 @@ def _determine_local_import_names(start_dir): @nox.session -def lint(session): - session.install("flake8", "flake8-import-order") +def lint(session: nox.sessions.Session) -> None: + if not TEST_CONFIG["enforce_type_hints"]: + session.install("flake8", "flake8-import-order") + else: + session.install("flake8", "flake8-import-order", "flake8-annotations") local_names = _determine_local_import_names(".") args = FLAKE8_COMMON_ARGS + [ "--application-import-names", ",".join(local_names), - "." 
+ ".", ] session.run("flake8", *args) +# +# Black +# + + +@nox.session +def blacken(session: nox.sessions.Session) -> None: + session.install(BLACK_VERSION) + python_files = [path for path in os.listdir(".") if path.endswith(".py")] + + session.run("black", *python_files) + + # # Sample Tests # @@ -151,13 +181,24 @@ def lint(session): PYTEST_COMMON_ARGS = ["--junitxml=sponge_log.xml"] -def _session_tests(session, post_install=None): +def _session_tests( + session: nox.sessions.Session, post_install: Callable = None +) -> None: + if TEST_CONFIG["pip_version_override"]: + pip_version = TEST_CONFIG["pip_version_override"] + session.install(f"pip=={pip_version}") """Runs py.test for a particular project.""" if os.path.exists("requirements.txt"): - session.install("-r", "requirements.txt") + if os.path.exists("constraints.txt"): + session.install("-r", "requirements.txt", "-c", "constraints.txt") + else: + session.install("-r", "requirements.txt") if os.path.exists("requirements-test.txt"): - session.install("-r", "requirements-test.txt") + if os.path.exists("constraints-test.txt"): + session.install("-r", "requirements-test.txt", "-c", "constraints-test.txt") + else: + session.install("-r", "requirements-test.txt") if INSTALL_LIBRARY_FROM_SOURCE: session.install("-e", _get_repo_root()) @@ -172,19 +213,19 @@ def _session_tests(session, post_install=None): # on travis where slow and flaky tests are excluded. # See http://doc.pytest.org/en/latest/_modules/_pytest/main.html success_codes=[0, 5], - env=get_pytest_env_vars() + env=get_pytest_env_vars(), ) @nox.session(python=ALL_VERSIONS) -def py(session): +def py(session: nox.sessions.Session) -> None: """Runs py.test for a sample using the specified version of Python.""" if session.python in TESTED_VERSIONS: _session_tests(session) else: - session.skip("SKIPPED: {} tests are disabled for this sample.".format( - session.python - )) + session.skip( + "SKIPPED: {} tests are disabled for this sample.".format(session.python) + ) # @@ -192,7 +233,7 @@ def py(session): # -def _get_repo_root(): +def _get_repo_root() -> Optional[str]: """ Returns the root folder of the project. """ # Get root of this repository. Assume we don't have directories nested deeper than 10 items. p = Path(os.getcwd()) @@ -201,6 +242,11 @@ def _get_repo_root(): break if Path(p / ".git").exists(): return str(p) + # .git is not available in repos cloned via Cloud Build + # setup.py is always in the library's root, so use that instead + # https://github.com/googleapis/synthtool/issues/792 + if Path(p / "setup.py").exists(): + return str(p) p = p.parent raise Exception("Unable to detect repository root.") @@ -210,7 +256,7 @@ def _get_repo_root(): @nox.session @nox.parametrize("path", GENERATED_READMES) -def readmegen(session, path): +def readmegen(session: nox.sessions.Session, path: str) -> None: """(Re-)generates the readme for a sample.""" session.install("jinja2", "pyyaml") dir_ = os.path.dirname(path) diff --git a/samples/instanceadmin/noxfile.py b/samples/instanceadmin/noxfile.py index ba55d7ce5..93a9122cc 100644 --- a/samples/instanceadmin/noxfile.py +++ b/samples/instanceadmin/noxfile.py @@ -17,6 +17,7 @@ import os from pathlib import Path import sys +from typing import Callable, Dict, List, Optional import nox @@ -27,8 +28,9 @@ # WARNING - WARNING - WARNING - WARNING - WARNING # WARNING - WARNING - WARNING - WARNING - WARNING -# Copy `noxfile_config.py` to your directory and modify it instead. 
+BLACK_VERSION = "black==19.10b0" +# Copy `noxfile_config.py` to your directory and modify it instead. # `TEST_CONFIG` dict is a configuration hook that allows users to # modify the test configurations. The values here should be in sync @@ -37,24 +39,29 @@ TEST_CONFIG = { # You can opt out from the test for specific Python versions. - 'ignored_versions': ["2.7"], - + "ignored_versions": [], + # Old samples are opted out of enforcing Python type hints + # All new samples should feature them + "enforce_type_hints": False, # An envvar key for determining the project id to use. Change it # to 'BUILD_SPECIFIC_GCLOUD_PROJECT' if you want to opt in using a # build specific Cloud project. You can also use your own string # to use your own Cloud project. - 'gcloud_project_env': 'GOOGLE_CLOUD_PROJECT', + "gcloud_project_env": "GOOGLE_CLOUD_PROJECT", # 'gcloud_project_env': 'BUILD_SPECIFIC_GCLOUD_PROJECT', - + # If you need to use a specific version of pip, + # change pip_version_override to the string representation + # of the version number, for example, "20.2.4" + "pip_version_override": None, # A dictionary you want to inject into your test. Don't put any # secrets here. These values will override predefined values. - 'envs': {}, + "envs": {}, } try: # Ensure we can import noxfile_config in the project's directory. - sys.path.append('.') + sys.path.append(".") from noxfile_config import TEST_CONFIG_OVERRIDE except ImportError as e: print("No user noxfile_config found: detail: {}".format(e)) @@ -64,36 +71,43 @@ TEST_CONFIG.update(TEST_CONFIG_OVERRIDE) -def get_pytest_env_vars(): +def get_pytest_env_vars() -> Dict[str, str]: """Returns a dict for pytest invocation.""" ret = {} # Override the GCLOUD_PROJECT and the alias. - env_key = TEST_CONFIG['gcloud_project_env'] + env_key = TEST_CONFIG["gcloud_project_env"] # This should error out if not set. - ret['GOOGLE_CLOUD_PROJECT'] = os.environ[env_key] + ret["GOOGLE_CLOUD_PROJECT"] = os.environ[env_key] # Apply user supplied envs. - ret.update(TEST_CONFIG['envs']) + ret.update(TEST_CONFIG["envs"]) return ret # DO NOT EDIT - automatically generated. -# All versions used to tested samples. -ALL_VERSIONS = ["2.7", "3.6", "3.7", "3.8"] +# All versions used to test samples. +ALL_VERSIONS = ["3.6", "3.7", "3.8", "3.9", "3.10"] # Any default versions that should be ignored. -IGNORED_VERSIONS = TEST_CONFIG['ignored_versions'] +IGNORED_VERSIONS = TEST_CONFIG["ignored_versions"] TESTED_VERSIONS = sorted([v for v in ALL_VERSIONS if v not in IGNORED_VERSIONS]) -INSTALL_LIBRARY_FROM_SOURCE = bool(os.environ.get("INSTALL_LIBRARY_FROM_SOURCE", False)) +INSTALL_LIBRARY_FROM_SOURCE = os.environ.get("INSTALL_LIBRARY_FROM_SOURCE", False) in ( + "True", + "true", +) + +# Error if a python version is missing +nox.options.error_on_missing_interpreters = True + # # Style Checks # -def _determine_local_import_names(start_dir): +def _determine_local_import_names(start_dir: str) -> List[str]: """Determines all import names that should be considered "local". 
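A standalone approximation of what _determine_local_import_names() (whose docstring continues below) ends up passing to flake8-import-order via --application-import-names; the helper name here is mine, and the real template applies some additional filtering.

import os


def local_import_names(start_dir: str) -> list:
    names = []
    for entry in os.listdir(start_dir):
        base, ext = os.path.splitext(entry)
        if ext == ".py":
            names.append(base)  # a top-level module such as "main"
        elif os.path.isdir(os.path.join(start_dir, entry)):
            names.append(entry)  # a package directory
    return names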
This is used when running the linter to insure that import order is @@ -131,18 +145,34 @@ def _determine_local_import_names(start_dir): @nox.session -def lint(session): - session.install("flake8", "flake8-import-order") +def lint(session: nox.sessions.Session) -> None: + if not TEST_CONFIG["enforce_type_hints"]: + session.install("flake8", "flake8-import-order") + else: + session.install("flake8", "flake8-import-order", "flake8-annotations") local_names = _determine_local_import_names(".") args = FLAKE8_COMMON_ARGS + [ "--application-import-names", ",".join(local_names), - "." + ".", ] session.run("flake8", *args) +# +# Black +# + + +@nox.session +def blacken(session: nox.sessions.Session) -> None: + session.install(BLACK_VERSION) + python_files = [path for path in os.listdir(".") if path.endswith(".py")] + + session.run("black", *python_files) + + # # Sample Tests # @@ -151,13 +181,24 @@ def lint(session): PYTEST_COMMON_ARGS = ["--junitxml=sponge_log.xml"] -def _session_tests(session, post_install=None): +def _session_tests( + session: nox.sessions.Session, post_install: Callable = None +) -> None: + if TEST_CONFIG["pip_version_override"]: + pip_version = TEST_CONFIG["pip_version_override"] + session.install(f"pip=={pip_version}") """Runs py.test for a particular project.""" if os.path.exists("requirements.txt"): - session.install("-r", "requirements.txt") + if os.path.exists("constraints.txt"): + session.install("-r", "requirements.txt", "-c", "constraints.txt") + else: + session.install("-r", "requirements.txt") if os.path.exists("requirements-test.txt"): - session.install("-r", "requirements-test.txt") + if os.path.exists("constraints-test.txt"): + session.install("-r", "requirements-test.txt", "-c", "constraints-test.txt") + else: + session.install("-r", "requirements-test.txt") if INSTALL_LIBRARY_FROM_SOURCE: session.install("-e", _get_repo_root()) @@ -172,19 +213,19 @@ def _session_tests(session, post_install=None): # on travis where slow and flaky tests are excluded. # See http://doc.pytest.org/en/latest/_modules/_pytest/main.html success_codes=[0, 5], - env=get_pytest_env_vars() + env=get_pytest_env_vars(), ) @nox.session(python=ALL_VERSIONS) -def py(session): +def py(session: nox.sessions.Session) -> None: """Runs py.test for a sample using the specified version of Python.""" if session.python in TESTED_VERSIONS: _session_tests(session) else: - session.skip("SKIPPED: {} tests are disabled for this sample.".format( - session.python - )) + session.skip( + "SKIPPED: {} tests are disabled for this sample.".format(session.python) + ) # @@ -192,7 +233,7 @@ def py(session): # -def _get_repo_root(): +def _get_repo_root() -> Optional[str]: """ Returns the root folder of the project. """ # Get root of this repository. Assume we don't have directories nested deeper than 10 items. 
p = Path(os.getcwd()) @@ -201,6 +242,11 @@ def _get_repo_root(): break if Path(p / ".git").exists(): return str(p) + # .git is not available in repos cloned via Cloud Build + # setup.py is always in the library's root, so use that instead + # https://github.com/googleapis/synthtool/issues/792 + if Path(p / "setup.py").exists(): + return str(p) p = p.parent raise Exception("Unable to detect repository root.") @@ -210,7 +256,7 @@ def _get_repo_root(): @nox.session @nox.parametrize("path", GENERATED_READMES) -def readmegen(session, path): +def readmegen(session: nox.sessions.Session, path: str) -> None: """(Re-)generates the readme for a sample.""" session.install("jinja2", "pyyaml") dir_ = os.path.dirname(path) diff --git a/samples/metricscaler/metricscaler.py b/samples/metricscaler/metricscaler.py index 43b430859..d29e40a39 100644 --- a/samples/metricscaler/metricscaler.py +++ b/samples/metricscaler/metricscaler.py @@ -25,9 +25,9 @@ from google.cloud.bigtable import enums from google.cloud.monitoring_v3 import query -PROJECT = os.environ['GOOGLE_CLOUD_PROJECT'] +PROJECT = os.environ["GOOGLE_CLOUD_PROJECT"] -logger = logging.getLogger('bigtable.metricscaler') +logger = logging.getLogger("bigtable.metricscaler") logger.addHandler(logging.StreamHandler()) logger.setLevel(logging.INFO) @@ -40,12 +40,15 @@ def get_cpu_load(bigtable_instance, bigtable_cluster): """ # [START bigtable_cpu] client = monitoring_v3.MetricServiceClient() - cpu_query = query.Query(client, - project=PROJECT, - metric_type='bigtable.googleapis.com/' - 'cluster/cpu_load', - minutes=5) - cpu_query = cpu_query.select_resources(instance=bigtable_instance, cluster=bigtable_cluster) + cpu_query = query.Query( + client, + project=PROJECT, + metric_type="bigtable.googleapis.com/" "cluster/cpu_load", + minutes=5, + ) + cpu_query = cpu_query.select_resources( + instance=bigtable_instance, cluster=bigtable_cluster + ) cpu = next(cpu_query.iter()) return cpu.points[0].value.double_value # [END bigtable_cpu] @@ -59,12 +62,15 @@ def get_storage_utilization(bigtable_instance, bigtable_cluster): """ # [START bigtable_metric_scaler_storage_utilization] client = monitoring_v3.MetricServiceClient() - utilization_query = query.Query(client, - project=PROJECT, - metric_type='bigtable.googleapis.com/' - 'cluster/storage_utilization', - minutes=5) - utilization_query = utilization_query.select_resources(instance=bigtable_instance, cluster=bigtable_cluster) + utilization_query = query.Query( + client, + project=PROJECT, + metric_type="bigtable.googleapis.com/" "cluster/storage_utilization", + minutes=5, + ) + utilization_query = utilization_query.select_resources( + instance=bigtable_instance, cluster=bigtable_cluster + ) utilization = next(utilization_query.iter()) return utilization.points[0].value.double_value # [END bigtable_metric_scaler_storage_utilization] @@ -114,20 +120,24 @@ def scale_bigtable(bigtable_instance, bigtable_cluster, scale_up): if scale_up: if current_node_count < max_node_count: - new_node_count = min( - current_node_count + size_change_step, max_node_count) + new_node_count = min(current_node_count + size_change_step, max_node_count) cluster.serve_nodes = new_node_count cluster.update() - logger.info('Scaled up from {} to {} nodes.'.format( - current_node_count, new_node_count)) + logger.info( + "Scaled up from {} to {} nodes.".format( + current_node_count, new_node_count + ) + ) else: if current_node_count > min_node_count: - new_node_count = max( - current_node_count - size_change_step, min_node_count) + new_node_count 
= max(current_node_count - size_change_step, min_node_count) cluster.serve_nodes = new_node_count cluster.update() - logger.info('Scaled down from {} to {} nodes.'.format( - current_node_count, new_node_count)) + logger.info( + "Scaled down from {} to {} nodes.".format( + current_node_count, new_node_count + ) + ) # [END bigtable_scale] @@ -138,7 +148,7 @@ def main( low_cpu_threshold, high_storage_threshold, short_sleep, - long_sleep + long_sleep, ): """Main loop runner that autoscales Cloud Bigtable. @@ -154,8 +164,8 @@ def main( """ cluster_cpu = get_cpu_load(bigtable_instance, bigtable_cluster) cluster_storage = get_storage_utilization(bigtable_instance, bigtable_cluster) - logger.info('Detected cpu of {}'.format(cluster_cpu)) - logger.info('Detected storage utilization of {}'.format(cluster_storage)) + logger.info("Detected cpu of {}".format(cluster_cpu)) + logger.info("Detected storage utilization of {}".format(cluster_storage)) try: if cluster_cpu > high_cpu_threshold or cluster_storage > high_storage_threshold: scale_bigtable(bigtable_instance, bigtable_cluster, True) @@ -165,44 +175,50 @@ def main( scale_bigtable(bigtable_instance, bigtable_cluster, False) time.sleep(long_sleep) else: - logger.info('CPU within threshold, sleeping.') + logger.info("CPU within threshold, sleeping.") time.sleep(short_sleep) except Exception as e: logger.error("Error during scaling: %s", e) -if __name__ == '__main__': +if __name__ == "__main__": parser = argparse.ArgumentParser( - description='Scales Cloud Bigtable clusters based on CPU usage.') + description="Scales Cloud Bigtable clusters based on CPU usage." + ) parser.add_argument( - 'bigtable_instance', - help='ID of the Cloud Bigtable instance to connect to.') + "bigtable_instance", help="ID of the Cloud Bigtable instance to connect to." + ) parser.add_argument( - 'bigtable_cluster', - help='ID of the Cloud Bigtable cluster to connect to.') + "bigtable_cluster", help="ID of the Cloud Bigtable cluster to connect to." 
+ ) parser.add_argument( - '--high_cpu_threshold', - help='If Cloud Bigtable CPU usage is above this threshold, scale up', - default=0.6) + "--high_cpu_threshold", + help="If Cloud Bigtable CPU usage is above this threshold, scale up", + default=0.6, + ) parser.add_argument( - '--low_cpu_threshold', - help='If Cloud Bigtable CPU usage is below this threshold, scale down', - default=0.2) + "--low_cpu_threshold", + help="If Cloud Bigtable CPU usage is below this threshold, scale down", + default=0.2, + ) parser.add_argument( - '--high_storage_threshold', - help='If Cloud Bigtable storage utilization is above this threshold, ' - 'scale up', - default=0.6) + "--high_storage_threshold", + help="If Cloud Bigtable storage utilization is above this threshold, " + "scale up", + default=0.6, + ) parser.add_argument( - '--short_sleep', - help='How long to sleep in seconds between checking metrics after no ' - 'scale operation', - default=60) + "--short_sleep", + help="How long to sleep in seconds between checking metrics after no " + "scale operation", + default=60, + ) parser.add_argument( - '--long_sleep', - help='How long to sleep in seconds between checking metrics after a ' - 'scaling operation', - default=60 * 10) + "--long_sleep", + help="How long to sleep in seconds between checking metrics after a " + "scaling operation", + default=60 * 10, + ) args = parser.parse_args() while True: @@ -213,4 +229,5 @@ def main( float(args.low_cpu_threshold), float(args.high_storage_threshold), int(args.short_sleep), - int(args.long_sleep)) + int(args.long_sleep), + ) diff --git a/samples/metricscaler/metricscaler_test.py b/samples/metricscaler/metricscaler_test.py index 13d463325..4420605ec 100644 --- a/samples/metricscaler/metricscaler_test.py +++ b/samples/metricscaler/metricscaler_test.py @@ -31,10 +31,10 @@ from metricscaler import scale_bigtable -PROJECT = os.environ['GOOGLE_CLOUD_PROJECT'] -BIGTABLE_ZONE = os.environ['BIGTABLE_ZONE'] +PROJECT = os.environ["GOOGLE_CLOUD_PROJECT"] +BIGTABLE_ZONE = os.environ["BIGTABLE_ZONE"] SIZE_CHANGE_STEP = 3 -INSTANCE_ID_FORMAT = 'metric-scale-test-{}' +INSTANCE_ID_FORMAT = "metric-scale-test-{}" BIGTABLE_INSTANCE = INSTANCE_ID_FORMAT.format(str(uuid.uuid4())[:10]) BIGTABLE_DEV_INSTANCE = INSTANCE_ID_FORMAT.format(str(uuid.uuid4())[:10]) @@ -42,14 +42,14 @@ # System tests to verify API calls succeed -@patch('metricscaler.query') +@patch("metricscaler.query") def test_get_cpu_load(monitoring_v3_query): iter_mock = monitoring_v3_query.Query().select_resources().iter iter_mock.return_value = iter([Mock(points=[Mock(value=Mock(double_value=1.0))])]) assert float(get_cpu_load(BIGTABLE_INSTANCE, BIGTABLE_INSTANCE)) > 0.0 -@patch('metricscaler.query') +@patch("metricscaler.query") def test_get_storage_utilization(monitoring_v3_query): iter_mock = monitoring_v3_query.Query().select_resources().iter iter_mock.return_value = iter([Mock(points=[Mock(value=Mock(double_value=1.0))])]) @@ -65,14 +65,18 @@ def instance(): serve_nodes = 1 storage_type = enums.StorageType.SSD production = enums.Instance.Type.PRODUCTION - labels = {'prod-label': 'prod-label'} - instance = client.instance(BIGTABLE_INSTANCE, instance_type=production, - labels=labels) + labels = {"prod-label": "prod-label"} + instance = client.instance( + BIGTABLE_INSTANCE, instance_type=production, labels=labels + ) if not instance.exists(): - cluster = instance.cluster(cluster_id, location_id=BIGTABLE_ZONE, - serve_nodes=serve_nodes, - default_storage_type=storage_type) + cluster = instance.cluster( + cluster_id, + 
location_id=BIGTABLE_ZONE, + serve_nodes=serve_nodes, + default_storage_type=storage_type, + ) instance.create(clusters=[cluster]) # Eventual consistency check @@ -92,14 +96,15 @@ def dev_instance(): storage_type = enums.StorageType.SSD development = enums.Instance.Type.DEVELOPMENT - labels = {'dev-label': 'dev-label'} - instance = client.instance(BIGTABLE_DEV_INSTANCE, - instance_type=development, - labels=labels) + labels = {"dev-label": "dev-label"} + instance = client.instance( + BIGTABLE_DEV_INSTANCE, instance_type=development, labels=labels + ) if not instance.exists(): - cluster = instance.cluster(cluster_id, location_id=BIGTABLE_ZONE, - default_storage_type=storage_type) + cluster = instance.cluster( + cluster_id, location_id=BIGTABLE_ZONE, default_storage_type=storage_type + ) instance.create(clusters=[cluster]) # Eventual consistency check @@ -117,9 +122,7 @@ def __init__(self, expected_node_count): def __call__(self, cluster): expected = self.expected_node_count - print( - f"Expected node count: {expected}; found: {cluster.serve_nodes}" - ) + print(f"Expected node count: {expected}; found: {cluster.serve_nodes}") return cluster.serve_nodes == expected @@ -146,7 +149,8 @@ def test_scale_bigtable(instance): ) scaled_node_count_predicate.__name__ = "scaled_node_count_predicate" _scaled_node_count = RetryInstanceState( - instance_predicate=scaled_node_count_predicate, max_tries=10, + instance_predicate=scaled_node_count_predicate, + max_tries=10, ) _scaled_node_count(cluster.reload)() @@ -155,7 +159,8 @@ def test_scale_bigtable(instance): restored_node_count_predicate = ClusterNodeCountPredicate(original_node_count) restored_node_count_predicate.__name__ = "restored_node_count_predicate" _restored_node_count = RetryInstanceState( - instance_predicate=restored_node_count_predicate, max_tries=10, + instance_predicate=restored_node_count_predicate, + max_tries=10, ) _restored_node_count(cluster.reload)() @@ -165,10 +170,10 @@ def test_handle_dev_instance(capsys, dev_instance): scale_bigtable(BIGTABLE_DEV_INSTANCE, BIGTABLE_DEV_INSTANCE, True) -@patch('time.sleep') -@patch('metricscaler.get_storage_utilization') -@patch('metricscaler.get_cpu_load') -@patch('metricscaler.scale_bigtable') +@patch("time.sleep") +@patch("metricscaler.get_storage_utilization") +@patch("metricscaler.get_cpu_load") +@patch("metricscaler.scale_bigtable") def test_main(scale_bigtable, get_cpu_load, get_storage_utilization, sleep): SHORT_SLEEP = 5 LONG_SLEEP = 10 @@ -177,57 +182,46 @@ def test_main(scale_bigtable, get_cpu_load, get_storage_utilization, sleep): get_cpu_load.return_value = 0.5 get_storage_utilization.return_value = 0.5 - main(BIGTABLE_INSTANCE, BIGTABLE_INSTANCE, 0.6, 0.3, 0.6, SHORT_SLEEP, - LONG_SLEEP) + main(BIGTABLE_INSTANCE, BIGTABLE_INSTANCE, 0.6, 0.3, 0.6, SHORT_SLEEP, LONG_SLEEP) scale_bigtable.assert_not_called() scale_bigtable.reset_mock() # Test high CPU, okay storage utilization get_cpu_load.return_value = 0.7 get_storage_utilization.return_value = 0.5 - main(BIGTABLE_INSTANCE, BIGTABLE_INSTANCE, 0.6, 0.3, 0.6, SHORT_SLEEP, - LONG_SLEEP) - scale_bigtable.assert_called_once_with(BIGTABLE_INSTANCE, - BIGTABLE_INSTANCE, True) + main(BIGTABLE_INSTANCE, BIGTABLE_INSTANCE, 0.6, 0.3, 0.6, SHORT_SLEEP, LONG_SLEEP) + scale_bigtable.assert_called_once_with(BIGTABLE_INSTANCE, BIGTABLE_INSTANCE, True) scale_bigtable.reset_mock() # Test low CPU, okay storage utilization get_storage_utilization.return_value = 0.5 get_cpu_load.return_value = 0.2 - main(BIGTABLE_INSTANCE, BIGTABLE_INSTANCE, 0.6, 0.3, 
0.6, SHORT_SLEEP, - LONG_SLEEP) - scale_bigtable.assert_called_once_with(BIGTABLE_INSTANCE, - BIGTABLE_INSTANCE, False) + main(BIGTABLE_INSTANCE, BIGTABLE_INSTANCE, 0.6, 0.3, 0.6, SHORT_SLEEP, LONG_SLEEP) + scale_bigtable.assert_called_once_with(BIGTABLE_INSTANCE, BIGTABLE_INSTANCE, False) scale_bigtable.reset_mock() # Test okay CPU, high storage utilization get_cpu_load.return_value = 0.5 get_storage_utilization.return_value = 0.7 - main(BIGTABLE_INSTANCE, BIGTABLE_INSTANCE, 0.6, 0.3, 0.6, SHORT_SLEEP, - LONG_SLEEP) - scale_bigtable.assert_called_once_with(BIGTABLE_INSTANCE, - BIGTABLE_INSTANCE, True) + main(BIGTABLE_INSTANCE, BIGTABLE_INSTANCE, 0.6, 0.3, 0.6, SHORT_SLEEP, LONG_SLEEP) + scale_bigtable.assert_called_once_with(BIGTABLE_INSTANCE, BIGTABLE_INSTANCE, True) scale_bigtable.reset_mock() # Test high CPU, high storage utilization get_cpu_load.return_value = 0.7 get_storage_utilization.return_value = 0.7 - main(BIGTABLE_INSTANCE, BIGTABLE_INSTANCE, 0.6, 0.3, 0.6, SHORT_SLEEP, - LONG_SLEEP) - scale_bigtable.assert_called_once_with(BIGTABLE_INSTANCE, - BIGTABLE_INSTANCE, True) + main(BIGTABLE_INSTANCE, BIGTABLE_INSTANCE, 0.6, 0.3, 0.6, SHORT_SLEEP, LONG_SLEEP) + scale_bigtable.assert_called_once_with(BIGTABLE_INSTANCE, BIGTABLE_INSTANCE, True) scale_bigtable.reset_mock() # Test low CPU, high storage utilization get_cpu_load.return_value = 0.2 get_storage_utilization.return_value = 0.7 - main(BIGTABLE_INSTANCE, BIGTABLE_INSTANCE, 0.6, 0.3, 0.6, SHORT_SLEEP, - LONG_SLEEP) - scale_bigtable.assert_called_once_with(BIGTABLE_INSTANCE, - BIGTABLE_INSTANCE, True) + main(BIGTABLE_INSTANCE, BIGTABLE_INSTANCE, 0.6, 0.3, 0.6, SHORT_SLEEP, LONG_SLEEP) + scale_bigtable.assert_called_once_with(BIGTABLE_INSTANCE, BIGTABLE_INSTANCE, True) scale_bigtable.reset_mock() -if __name__ == '__main__': +if __name__ == "__main__": test_get_cpu_load() diff --git a/samples/metricscaler/noxfile.py b/samples/metricscaler/noxfile.py index ba55d7ce5..93a9122cc 100644 --- a/samples/metricscaler/noxfile.py +++ b/samples/metricscaler/noxfile.py @@ -17,6 +17,7 @@ import os from pathlib import Path import sys +from typing import Callable, Dict, List, Optional import nox @@ -27,8 +28,9 @@ # WARNING - WARNING - WARNING - WARNING - WARNING # WARNING - WARNING - WARNING - WARNING - WARNING -# Copy `noxfile_config.py` to your directory and modify it instead. +BLACK_VERSION = "black==19.10b0" +# Copy `noxfile_config.py` to your directory and modify it instead. # `TEST_CONFIG` dict is a configuration hook that allows users to # modify the test configurations. The values here should be in sync @@ -37,24 +39,29 @@ TEST_CONFIG = { # You can opt out from the test for specific Python versions. - 'ignored_versions': ["2.7"], - + "ignored_versions": [], + # Old samples are opted out of enforcing Python type hints + # All new samples should feature them + "enforce_type_hints": False, # An envvar key for determining the project id to use. Change it # to 'BUILD_SPECIFIC_GCLOUD_PROJECT' if you want to opt in using a # build specific Cloud project. You can also use your own string # to use your own Cloud project. - 'gcloud_project_env': 'GOOGLE_CLOUD_PROJECT', + "gcloud_project_env": "GOOGLE_CLOUD_PROJECT", # 'gcloud_project_env': 'BUILD_SPECIFIC_GCLOUD_PROJECT', - + # If you need to use a specific version of pip, + # change pip_version_override to the string representation + # of the version number, for example, "20.2.4" + "pip_version_override": None, # A dictionary you want to inject into your test. Don't put any # secrets here. 
These values will override predefined values. - 'envs': {}, + "envs": {}, } try: # Ensure we can import noxfile_config in the project's directory. - sys.path.append('.') + sys.path.append(".") from noxfile_config import TEST_CONFIG_OVERRIDE except ImportError as e: print("No user noxfile_config found: detail: {}".format(e)) @@ -64,36 +71,43 @@ TEST_CONFIG.update(TEST_CONFIG_OVERRIDE) -def get_pytest_env_vars(): +def get_pytest_env_vars() -> Dict[str, str]: """Returns a dict for pytest invocation.""" ret = {} # Override the GCLOUD_PROJECT and the alias. - env_key = TEST_CONFIG['gcloud_project_env'] + env_key = TEST_CONFIG["gcloud_project_env"] # This should error out if not set. - ret['GOOGLE_CLOUD_PROJECT'] = os.environ[env_key] + ret["GOOGLE_CLOUD_PROJECT"] = os.environ[env_key] # Apply user supplied envs. - ret.update(TEST_CONFIG['envs']) + ret.update(TEST_CONFIG["envs"]) return ret # DO NOT EDIT - automatically generated. -# All versions used to tested samples. -ALL_VERSIONS = ["2.7", "3.6", "3.7", "3.8"] +# All versions used to test samples. +ALL_VERSIONS = ["3.6", "3.7", "3.8", "3.9", "3.10"] # Any default versions that should be ignored. -IGNORED_VERSIONS = TEST_CONFIG['ignored_versions'] +IGNORED_VERSIONS = TEST_CONFIG["ignored_versions"] TESTED_VERSIONS = sorted([v for v in ALL_VERSIONS if v not in IGNORED_VERSIONS]) -INSTALL_LIBRARY_FROM_SOURCE = bool(os.environ.get("INSTALL_LIBRARY_FROM_SOURCE", False)) +INSTALL_LIBRARY_FROM_SOURCE = os.environ.get("INSTALL_LIBRARY_FROM_SOURCE", False) in ( + "True", + "true", +) + +# Error if a python version is missing +nox.options.error_on_missing_interpreters = True + # # Style Checks # -def _determine_local_import_names(start_dir): +def _determine_local_import_names(start_dir: str) -> List[str]: """Determines all import names that should be considered "local". This is used when running the linter to insure that import order is @@ -131,18 +145,34 @@ def _determine_local_import_names(start_dir): @nox.session -def lint(session): - session.install("flake8", "flake8-import-order") +def lint(session: nox.sessions.Session) -> None: + if not TEST_CONFIG["enforce_type_hints"]: + session.install("flake8", "flake8-import-order") + else: + session.install("flake8", "flake8-import-order", "flake8-annotations") local_names = _determine_local_import_names(".") args = FLAKE8_COMMON_ARGS + [ "--application-import-names", ",".join(local_names), - "." 
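# Why the membership test above replaced bool(os.environ.get(...)): every
# non-empty string is truthy in Python, so the old form treated
# INSTALL_LIBRARY_FROM_SOURCE=false as True. A minimal demonstration:

import os

os.environ["INSTALL_LIBRARY_FROM_SOURCE"] = "false"
assert bool(os.environ.get("INSTALL_LIBRARY_FROM_SOURCE", False))  # old form: truthy
assert os.environ.get("INSTALL_LIBRARY_FROM_SOURCE", False) not in ("True", "true")  # new form: off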
+ ".", ] session.run("flake8", *args) +# +# Black +# + + +@nox.session +def blacken(session: nox.sessions.Session) -> None: + session.install(BLACK_VERSION) + python_files = [path for path in os.listdir(".") if path.endswith(".py")] + + session.run("black", *python_files) + + # # Sample Tests # @@ -151,13 +181,24 @@ def lint(session): PYTEST_COMMON_ARGS = ["--junitxml=sponge_log.xml"] -def _session_tests(session, post_install=None): +def _session_tests( + session: nox.sessions.Session, post_install: Callable = None +) -> None: + if TEST_CONFIG["pip_version_override"]: + pip_version = TEST_CONFIG["pip_version_override"] + session.install(f"pip=={pip_version}") """Runs py.test for a particular project.""" if os.path.exists("requirements.txt"): - session.install("-r", "requirements.txt") + if os.path.exists("constraints.txt"): + session.install("-r", "requirements.txt", "-c", "constraints.txt") + else: + session.install("-r", "requirements.txt") if os.path.exists("requirements-test.txt"): - session.install("-r", "requirements-test.txt") + if os.path.exists("constraints-test.txt"): + session.install("-r", "requirements-test.txt", "-c", "constraints-test.txt") + else: + session.install("-r", "requirements-test.txt") if INSTALL_LIBRARY_FROM_SOURCE: session.install("-e", _get_repo_root()) @@ -172,19 +213,19 @@ def _session_tests(session, post_install=None): # on travis where slow and flaky tests are excluded. # See http://doc.pytest.org/en/latest/_modules/_pytest/main.html success_codes=[0, 5], - env=get_pytest_env_vars() + env=get_pytest_env_vars(), ) @nox.session(python=ALL_VERSIONS) -def py(session): +def py(session: nox.sessions.Session) -> None: """Runs py.test for a sample using the specified version of Python.""" if session.python in TESTED_VERSIONS: _session_tests(session) else: - session.skip("SKIPPED: {} tests are disabled for this sample.".format( - session.python - )) + session.skip( + "SKIPPED: {} tests are disabled for this sample.".format(session.python) + ) # @@ -192,7 +233,7 @@ def py(session): # -def _get_repo_root(): +def _get_repo_root() -> Optional[str]: """ Returns the root folder of the project. """ # Get root of this repository. Assume we don't have directories nested deeper than 10 items. p = Path(os.getcwd()) @@ -201,6 +242,11 @@ def _get_repo_root(): break if Path(p / ".git").exists(): return str(p) + # .git is not available in repos cloned via Cloud Build + # setup.py is always in the library's root, so use that instead + # https://github.com/googleapis/synthtool/issues/792 + if Path(p / "setup.py").exists(): + return str(p) p = p.parent raise Exception("Unable to detect repository root.") @@ -210,7 +256,7 @@ def _get_repo_root(): @nox.session @nox.parametrize("path", GENERATED_READMES) -def readmegen(session, path): +def readmegen(session: nox.sessions.Session, path: str) -> None: """(Re-)generates the readme for a sample.""" session.install("jinja2", "pyyaml") dir_ = os.path.dirname(path) diff --git a/samples/quickstart/main.py b/samples/quickstart/main.py index 3763296f1..50bfe6394 100644 --- a/samples/quickstart/main.py +++ b/samples/quickstart/main.py @@ -20,8 +20,7 @@ from google.cloud import bigtable -def main(project_id="project-id", instance_id="instance-id", - table_id="my-table"): +def main(project_id="project-id", instance_id="instance-id", table_id="my-table"): # Create a Cloud Bigtable client. client = bigtable.Client(project=project_id) @@ -31,27 +30,27 @@ def main(project_id="project-id", instance_id="instance-id", # Open an existing table. 
table = instance.table(table_id) - row_key = 'r1' - row = table.read_row(row_key.encode('utf-8')) + row_key = "r1" + row = table.read_row(row_key.encode("utf-8")) - column_family_id = 'cf1' - column_id = 'c1'.encode('utf-8') - value = row.cells[column_family_id][column_id][0].value.decode('utf-8') + column_family_id = "cf1" + column_id = "c1".encode("utf-8") + value = row.cells[column_family_id][column_id][0].value.decode("utf-8") - print('Row key: {}\nData: {}'.format(row_key, value)) + print("Row key: {}\nData: {}".format(row_key, value)) -if __name__ == '__main__': +if __name__ == "__main__": parser = argparse.ArgumentParser( - description=__doc__, - formatter_class=argparse.ArgumentDefaultsHelpFormatter) - parser.add_argument('project_id', help='Your Cloud Platform project ID.') + description=__doc__, formatter_class=argparse.ArgumentDefaultsHelpFormatter + ) + parser.add_argument("project_id", help="Your Cloud Platform project ID.") parser.add_argument( - 'instance_id', help='ID of the Cloud Bigtable instance to connect to.') + "instance_id", help="ID of the Cloud Bigtable instance to connect to." + ) parser.add_argument( - '--table', - help='Existing table used in the quickstart.', - default='my-table') + "--table", help="Existing table used in the quickstart.", default="my-table" + ) args = parser.parse_args() main(args.project_id, args.instance_id, args.table) diff --git a/samples/quickstart/main_test.py b/samples/quickstart/main_test.py index ea1e8776b..46d578b6b 100644 --- a/samples/quickstart/main_test.py +++ b/samples/quickstart/main_test.py @@ -21,9 +21,9 @@ from main import main -PROJECT = os.environ['GOOGLE_CLOUD_PROJECT'] -BIGTABLE_INSTANCE = os.environ['BIGTABLE_INSTANCE'] -TABLE_ID_FORMAT = 'quickstart-test-{}' +PROJECT = os.environ["GOOGLE_CLOUD_PROJECT"] +BIGTABLE_INSTANCE = os.environ["BIGTABLE_INSTANCE"] +TABLE_ID_FORMAT = "quickstart-test-{}" @pytest.fixture() @@ -32,7 +32,7 @@ def table(): client = bigtable.Client(project=PROJECT, admin=True) instance = client.instance(BIGTABLE_INSTANCE) table = instance.table(table_id) - column_family_id = 'cf1' + column_family_id = "cf1" column_families = {column_family_id: None} table.create(column_families=column_families) @@ -50,4 +50,4 @@ def test_main(capsys, table): main(PROJECT, BIGTABLE_INSTANCE, table_id) out, _ = capsys.readouterr() - assert 'Row key: r1\nData: test-value\n' in out + assert "Row key: r1\nData: test-value\n" in out diff --git a/samples/quickstart/noxfile.py b/samples/quickstart/noxfile.py index ba55d7ce5..93a9122cc 100644 --- a/samples/quickstart/noxfile.py +++ b/samples/quickstart/noxfile.py @@ -17,6 +17,7 @@ import os from pathlib import Path import sys +from typing import Callable, Dict, List, Optional import nox @@ -27,8 +28,9 @@ # WARNING - WARNING - WARNING - WARNING - WARNING # WARNING - WARNING - WARNING - WARNING - WARNING -# Copy `noxfile_config.py` to your directory and modify it instead. +BLACK_VERSION = "black==19.10b0" +# Copy `noxfile_config.py` to your directory and modify it instead. # `TEST_CONFIG` dict is a configuration hook that allows users to # modify the test configurations. The values here should be in sync @@ -37,24 +39,29 @@ TEST_CONFIG = { # You can opt out from the test for specific Python versions. - 'ignored_versions': ["2.7"], - + "ignored_versions": [], + # Old samples are opted out of enforcing Python type hints + # All new samples should feature them + "enforce_type_hints": False, # An envvar key for determining the project id to use. 
Change it # to 'BUILD_SPECIFIC_GCLOUD_PROJECT' if you want to opt in using a # build specific Cloud project. You can also use your own string # to use your own Cloud project. - 'gcloud_project_env': 'GOOGLE_CLOUD_PROJECT', + "gcloud_project_env": "GOOGLE_CLOUD_PROJECT", # 'gcloud_project_env': 'BUILD_SPECIFIC_GCLOUD_PROJECT', - + # If you need to use a specific version of pip, + # change pip_version_override to the string representation + # of the version number, for example, "20.2.4" + "pip_version_override": None, # A dictionary you want to inject into your test. Don't put any # secrets here. These values will override predefined values. - 'envs': {}, + "envs": {}, } try: # Ensure we can import noxfile_config in the project's directory. - sys.path.append('.') + sys.path.append(".") from noxfile_config import TEST_CONFIG_OVERRIDE except ImportError as e: print("No user noxfile_config found: detail: {}".format(e)) @@ -64,36 +71,43 @@ TEST_CONFIG.update(TEST_CONFIG_OVERRIDE) -def get_pytest_env_vars(): +def get_pytest_env_vars() -> Dict[str, str]: """Returns a dict for pytest invocation.""" ret = {} # Override the GCLOUD_PROJECT and the alias. - env_key = TEST_CONFIG['gcloud_project_env'] + env_key = TEST_CONFIG["gcloud_project_env"] # This should error out if not set. - ret['GOOGLE_CLOUD_PROJECT'] = os.environ[env_key] + ret["GOOGLE_CLOUD_PROJECT"] = os.environ[env_key] # Apply user supplied envs. - ret.update(TEST_CONFIG['envs']) + ret.update(TEST_CONFIG["envs"]) return ret # DO NOT EDIT - automatically generated. -# All versions used to tested samples. -ALL_VERSIONS = ["2.7", "3.6", "3.7", "3.8"] +# All versions used to test samples. +ALL_VERSIONS = ["3.6", "3.7", "3.8", "3.9", "3.10"] # Any default versions that should be ignored. -IGNORED_VERSIONS = TEST_CONFIG['ignored_versions'] +IGNORED_VERSIONS = TEST_CONFIG["ignored_versions"] TESTED_VERSIONS = sorted([v for v in ALL_VERSIONS if v not in IGNORED_VERSIONS]) -INSTALL_LIBRARY_FROM_SOURCE = bool(os.environ.get("INSTALL_LIBRARY_FROM_SOURCE", False)) +INSTALL_LIBRARY_FROM_SOURCE = os.environ.get("INSTALL_LIBRARY_FROM_SOURCE", False) in ( + "True", + "true", +) + +# Error if a python version is missing +nox.options.error_on_missing_interpreters = True + # # Style Checks # -def _determine_local_import_names(start_dir): +def _determine_local_import_names(start_dir: str) -> List[str]: """Determines all import names that should be considered "local". This is used when running the linter to insure that import order is @@ -131,18 +145,34 @@ def _determine_local_import_names(start_dir): @nox.session -def lint(session): - session.install("flake8", "flake8-import-order") +def lint(session: nox.sessions.Session) -> None: + if not TEST_CONFIG["enforce_type_hints"]: + session.install("flake8", "flake8-import-order") + else: + session.install("flake8", "flake8-import-order", "flake8-annotations") local_names = _determine_local_import_names(".") args = FLAKE8_COMMON_ARGS + [ "--application-import-names", ",".join(local_names), - "." 
+ ".", ] session.run("flake8", *args) +# +# Black +# + + +@nox.session +def blacken(session: nox.sessions.Session) -> None: + session.install(BLACK_VERSION) + python_files = [path for path in os.listdir(".") if path.endswith(".py")] + + session.run("black", *python_files) + + # # Sample Tests # @@ -151,13 +181,24 @@ def lint(session): PYTEST_COMMON_ARGS = ["--junitxml=sponge_log.xml"] -def _session_tests(session, post_install=None): +def _session_tests( + session: nox.sessions.Session, post_install: Callable = None +) -> None: + if TEST_CONFIG["pip_version_override"]: + pip_version = TEST_CONFIG["pip_version_override"] + session.install(f"pip=={pip_version}") """Runs py.test for a particular project.""" if os.path.exists("requirements.txt"): - session.install("-r", "requirements.txt") + if os.path.exists("constraints.txt"): + session.install("-r", "requirements.txt", "-c", "constraints.txt") + else: + session.install("-r", "requirements.txt") if os.path.exists("requirements-test.txt"): - session.install("-r", "requirements-test.txt") + if os.path.exists("constraints-test.txt"): + session.install("-r", "requirements-test.txt", "-c", "constraints-test.txt") + else: + session.install("-r", "requirements-test.txt") if INSTALL_LIBRARY_FROM_SOURCE: session.install("-e", _get_repo_root()) @@ -172,19 +213,19 @@ def _session_tests(session, post_install=None): # on travis where slow and flaky tests are excluded. # See http://doc.pytest.org/en/latest/_modules/_pytest/main.html success_codes=[0, 5], - env=get_pytest_env_vars() + env=get_pytest_env_vars(), ) @nox.session(python=ALL_VERSIONS) -def py(session): +def py(session: nox.sessions.Session) -> None: """Runs py.test for a sample using the specified version of Python.""" if session.python in TESTED_VERSIONS: _session_tests(session) else: - session.skip("SKIPPED: {} tests are disabled for this sample.".format( - session.python - )) + session.skip( + "SKIPPED: {} tests are disabled for this sample.".format(session.python) + ) # @@ -192,7 +233,7 @@ def py(session): # -def _get_repo_root(): +def _get_repo_root() -> Optional[str]: """ Returns the root folder of the project. """ # Get root of this repository. Assume we don't have directories nested deeper than 10 items. 
p = Path(os.getcwd()) @@ -201,6 +242,11 @@ def _get_repo_root(): break if Path(p / ".git").exists(): return str(p) + # .git is not available in repos cloned via Cloud Build + # setup.py is always in the library's root, so use that instead + # https://github.com/googleapis/synthtool/issues/792 + if Path(p / "setup.py").exists(): + return str(p) p = p.parent raise Exception("Unable to detect repository root.") @@ -210,7 +256,7 @@ def _get_repo_root(): @nox.session @nox.parametrize("path", GENERATED_READMES) -def readmegen(session, path): +def readmegen(session: nox.sessions.Session, path: str) -> None: """(Re-)generates the readme for a sample.""" session.install("jinja2", "pyyaml") dir_ = os.path.dirname(path) diff --git a/samples/quickstart_happybase/main.py b/samples/quickstart_happybase/main.py index 056e3666b..6a05c4cbd 100644 --- a/samples/quickstart_happybase/main.py +++ b/samples/quickstart_happybase/main.py @@ -20,8 +20,7 @@ from google.cloud import happybase -def main(project_id="project-id", instance_id="instance-id", - table_id="my-table"): +def main(project_id="project-id", instance_id="instance-id", table_id="my-table"): # Creates a Bigtable client client = bigtable.Client(project=project_id) @@ -34,28 +33,28 @@ def main(project_id="project-id", instance_id="instance-id", # Connect to an existing table:my-table table = connection.table(table_id) - key = 'r1' - row = table.row(key.encode('utf-8')) + key = "r1" + row = table.row(key.encode("utf-8")) - column = 'cf1:c1'.encode('utf-8') - value = row[column].decode('utf-8') - print('Row key: {}\nData: {}'.format(key, value)) + column = "cf1:c1".encode("utf-8") + value = row[column].decode("utf-8") + print("Row key: {}\nData: {}".format(key, value)) finally: connection.close() -if __name__ == '__main__': +if __name__ == "__main__": parser = argparse.ArgumentParser( - description=__doc__, - formatter_class=argparse.ArgumentDefaultsHelpFormatter) - parser.add_argument('project_id', help='Your Cloud Platform project ID.') + description=__doc__, formatter_class=argparse.ArgumentDefaultsHelpFormatter + ) + parser.add_argument("project_id", help="Your Cloud Platform project ID.") parser.add_argument( - 'instance_id', help='ID of the Cloud Bigtable instance to connect to.') + "instance_id", help="ID of the Cloud Bigtable instance to connect to." 
+ ) parser.add_argument( - '--table', - help='Existing table used in the quickstart.', - default='my-table') + "--table", help="Existing table used in the quickstart.", default="my-table" + ) args = parser.parse_args() main(args.project_id, args.instance_id, args.table) diff --git a/samples/quickstart_happybase/main_test.py b/samples/quickstart_happybase/main_test.py index 26afa6d6b..dc62ebede 100644 --- a/samples/quickstart_happybase/main_test.py +++ b/samples/quickstart_happybase/main_test.py @@ -21,9 +21,9 @@ from main import main -PROJECT = os.environ['GOOGLE_CLOUD_PROJECT'] -BIGTABLE_INSTANCE = os.environ['BIGTABLE_INSTANCE'] -TABLE_ID_FORMAT = 'quickstart-hb-test-{}' +PROJECT = os.environ["GOOGLE_CLOUD_PROJECT"] +BIGTABLE_INSTANCE = os.environ["BIGTABLE_INSTANCE"] +TABLE_ID_FORMAT = "quickstart-hb-test-{}" @pytest.fixture() @@ -32,7 +32,7 @@ def table(): client = bigtable.Client(project=PROJECT, admin=True) instance = client.instance(BIGTABLE_INSTANCE) table = instance.table(table_id) - column_family_id = 'cf1' + column_family_id = "cf1" column_families = {column_family_id: None} table.create(column_families=column_families) @@ -50,4 +50,4 @@ def test_main(capsys, table): main(PROJECT, BIGTABLE_INSTANCE, table_id) out, _ = capsys.readouterr() - assert 'Row key: r1\nData: test-value\n' in out + assert "Row key: r1\nData: test-value\n" in out diff --git a/samples/quickstart_happybase/noxfile.py b/samples/quickstart_happybase/noxfile.py index ba55d7ce5..93a9122cc 100644 --- a/samples/quickstart_happybase/noxfile.py +++ b/samples/quickstart_happybase/noxfile.py @@ -17,6 +17,7 @@ import os from pathlib import Path import sys +from typing import Callable, Dict, List, Optional import nox @@ -27,8 +28,9 @@ # WARNING - WARNING - WARNING - WARNING - WARNING # WARNING - WARNING - WARNING - WARNING - WARNING -# Copy `noxfile_config.py` to your directory and modify it instead. +BLACK_VERSION = "black==19.10b0" +# Copy `noxfile_config.py` to your directory and modify it instead. # `TEST_CONFIG` dict is a configuration hook that allows users to # modify the test configurations. The values here should be in sync @@ -37,24 +39,29 @@ TEST_CONFIG = { # You can opt out from the test for specific Python versions. - 'ignored_versions': ["2.7"], - + "ignored_versions": [], + # Old samples are opted out of enforcing Python type hints + # All new samples should feature them + "enforce_type_hints": False, # An envvar key for determining the project id to use. Change it # to 'BUILD_SPECIFIC_GCLOUD_PROJECT' if you want to opt in using a # build specific Cloud project. You can also use your own string # to use your own Cloud project. - 'gcloud_project_env': 'GOOGLE_CLOUD_PROJECT', + "gcloud_project_env": "GOOGLE_CLOUD_PROJECT", # 'gcloud_project_env': 'BUILD_SPECIFIC_GCLOUD_PROJECT', - + # If you need to use a specific version of pip, + # change pip_version_override to the string representation + # of the version number, for example, "20.2.4" + "pip_version_override": None, # A dictionary you want to inject into your test. Don't put any # secrets here. These values will override predefined values. - 'envs': {}, + "envs": {}, } try: # Ensure we can import noxfile_config in the project's directory. 
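# A minimal noxfile_config.py that this import picks up (the key names come
# from TEST_CONFIG above; the values shown here are illustrative only):

TEST_CONFIG_OVERRIDE = {
    "ignored_versions": ["3.6"],  # e.g. skip interpreters this sample can't run
    "gcloud_project_env": "BUILD_SPECIFIC_GCLOUD_PROJECT",
    "envs": {"BIGTABLE_INSTANCE": "my-test-instance"},  # extra env for pytest
}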
- sys.path.append('.') + sys.path.append(".") from noxfile_config import TEST_CONFIG_OVERRIDE except ImportError as e: print("No user noxfile_config found: detail: {}".format(e)) @@ -64,36 +71,43 @@ TEST_CONFIG.update(TEST_CONFIG_OVERRIDE) -def get_pytest_env_vars(): +def get_pytest_env_vars() -> Dict[str, str]: """Returns a dict for pytest invocation.""" ret = {} # Override the GCLOUD_PROJECT and the alias. - env_key = TEST_CONFIG['gcloud_project_env'] + env_key = TEST_CONFIG["gcloud_project_env"] # This should error out if not set. - ret['GOOGLE_CLOUD_PROJECT'] = os.environ[env_key] + ret["GOOGLE_CLOUD_PROJECT"] = os.environ[env_key] # Apply user supplied envs. - ret.update(TEST_CONFIG['envs']) + ret.update(TEST_CONFIG["envs"]) return ret # DO NOT EDIT - automatically generated. -# All versions used to tested samples. -ALL_VERSIONS = ["2.7", "3.6", "3.7", "3.8"] +# All versions used to test samples. +ALL_VERSIONS = ["3.6", "3.7", "3.8", "3.9", "3.10"] # Any default versions that should be ignored. -IGNORED_VERSIONS = TEST_CONFIG['ignored_versions'] +IGNORED_VERSIONS = TEST_CONFIG["ignored_versions"] TESTED_VERSIONS = sorted([v for v in ALL_VERSIONS if v not in IGNORED_VERSIONS]) -INSTALL_LIBRARY_FROM_SOURCE = bool(os.environ.get("INSTALL_LIBRARY_FROM_SOURCE", False)) +INSTALL_LIBRARY_FROM_SOURCE = os.environ.get("INSTALL_LIBRARY_FROM_SOURCE", False) in ( + "True", + "true", +) + +# Error if a python version is missing +nox.options.error_on_missing_interpreters = True + # # Style Checks # -def _determine_local_import_names(start_dir): +def _determine_local_import_names(start_dir: str) -> List[str]: """Determines all import names that should be considered "local". This is used when running the linter to insure that import order is @@ -131,18 +145,34 @@ def _determine_local_import_names(start_dir): @nox.session -def lint(session): - session.install("flake8", "flake8-import-order") +def lint(session: nox.sessions.Session) -> None: + if not TEST_CONFIG["enforce_type_hints"]: + session.install("flake8", "flake8-import-order") + else: + session.install("flake8", "flake8-import-order", "flake8-annotations") local_names = _determine_local_import_names(".") args = FLAKE8_COMMON_ARGS + [ "--application-import-names", ",".join(local_names), - "." 
+ ".", ] session.run("flake8", *args) +# +# Black +# + + +@nox.session +def blacken(session: nox.sessions.Session) -> None: + session.install(BLACK_VERSION) + python_files = [path for path in os.listdir(".") if path.endswith(".py")] + + session.run("black", *python_files) + + # # Sample Tests # @@ -151,13 +181,24 @@ def lint(session): PYTEST_COMMON_ARGS = ["--junitxml=sponge_log.xml"] -def _session_tests(session, post_install=None): +def _session_tests( + session: nox.sessions.Session, post_install: Callable = None +) -> None: + if TEST_CONFIG["pip_version_override"]: + pip_version = TEST_CONFIG["pip_version_override"] + session.install(f"pip=={pip_version}") """Runs py.test for a particular project.""" if os.path.exists("requirements.txt"): - session.install("-r", "requirements.txt") + if os.path.exists("constraints.txt"): + session.install("-r", "requirements.txt", "-c", "constraints.txt") + else: + session.install("-r", "requirements.txt") if os.path.exists("requirements-test.txt"): - session.install("-r", "requirements-test.txt") + if os.path.exists("constraints-test.txt"): + session.install("-r", "requirements-test.txt", "-c", "constraints-test.txt") + else: + session.install("-r", "requirements-test.txt") if INSTALL_LIBRARY_FROM_SOURCE: session.install("-e", _get_repo_root()) @@ -172,19 +213,19 @@ def _session_tests(session, post_install=None): # on travis where slow and flaky tests are excluded. # See http://doc.pytest.org/en/latest/_modules/_pytest/main.html success_codes=[0, 5], - env=get_pytest_env_vars() + env=get_pytest_env_vars(), ) @nox.session(python=ALL_VERSIONS) -def py(session): +def py(session: nox.sessions.Session) -> None: """Runs py.test for a sample using the specified version of Python.""" if session.python in TESTED_VERSIONS: _session_tests(session) else: - session.skip("SKIPPED: {} tests are disabled for this sample.".format( - session.python - )) + session.skip( + "SKIPPED: {} tests are disabled for this sample.".format(session.python) + ) # @@ -192,7 +233,7 @@ def py(session): # -def _get_repo_root(): +def _get_repo_root() -> Optional[str]: """ Returns the root folder of the project. """ # Get root of this repository. Assume we don't have directories nested deeper than 10 items. 
p = Path(os.getcwd()) @@ -201,6 +242,11 @@ def _get_repo_root(): break if Path(p / ".git").exists(): return str(p) + # .git is not available in repos cloned via Cloud Build + # setup.py is always in the library's root, so use that instead + # https://github.com/googleapis/synthtool/issues/792 + if Path(p / "setup.py").exists(): + return str(p) p = p.parent raise Exception("Unable to detect repository root.") @@ -210,7 +256,7 @@ def _get_repo_root(): @nox.session @nox.parametrize("path", GENERATED_READMES) -def readmegen(session, path): +def readmegen(session: nox.sessions.Session, path: str) -> None: """(Re-)generates the readme for a sample.""" session.install("jinja2", "pyyaml") dir_ = os.path.dirname(path) diff --git a/samples/snippets/filters/filter_snippets.py b/samples/snippets/filters/filter_snippets.py index c815eae99..4211378f3 100644 --- a/samples/snippets/filters/filter_snippets.py +++ b/samples/snippets/filters/filter_snippets.py @@ -29,7 +29,7 @@ def filter_limit_row_sample(project_id, instance_id, table_id): instance = client.instance(instance_id) table = instance.table(table_id) - rows = table.read_rows(filter_=row_filters.RowSampleFilter(.75)) + rows = table.read_rows(filter_=row_filters.RowSampleFilter(0.75)) for row in rows: print_row(row) @@ -42,7 +42,8 @@ def filter_limit_row_regex(project_id, instance_id, table_id): table = instance.table(table_id) rows = table.read_rows( - filter_=row_filters.RowKeyRegexFilter(".*#20190501$".encode("utf-8"))) + filter_=row_filters.RowKeyRegexFilter(".*#20190501$".encode("utf-8")) + ) for row in rows: print_row(row) @@ -91,7 +92,8 @@ def filter_limit_col_family_regex(project_id, instance_id, table_id): table = instance.table(table_id) rows = table.read_rows( - filter_=row_filters.FamilyNameRegexFilter("stats_.*$".encode("utf-8"))) + filter_=row_filters.FamilyNameRegexFilter("stats_.*$".encode("utf-8")) + ) for row in rows: print_row(row) @@ -104,8 +106,8 @@ def filter_limit_col_qualifier_regex(project_id, instance_id, table_id): table = instance.table(table_id) rows = table.read_rows( - filter_=row_filters.ColumnQualifierRegexFilter( - "connected_.*$".encode("utf-8"))) + filter_=row_filters.ColumnQualifierRegexFilter("connected_.*$".encode("utf-8")) + ) for row in rows: print_row(row) @@ -118,10 +120,10 @@ def filter_limit_col_range(project_id, instance_id, table_id): table = instance.table(table_id) rows = table.read_rows( - filter_=row_filters.ColumnRangeFilter("cell_plan", - b"data_plan_01gb", - b"data_plan_10gb", - inclusive_end=False)) + filter_=row_filters.ColumnRangeFilter( + "cell_plan", b"data_plan_01gb", b"data_plan_10gb", inclusive_end=False + ) + ) for row in rows: print_row(row) @@ -134,7 +136,8 @@ def filter_limit_value_range(project_id, instance_id, table_id): table = instance.table(table_id) rows = table.read_rows( - filter_=row_filters.ValueRangeFilter(b"PQ2A.190405", b"PQ2A.190406")) + filter_=row_filters.ValueRangeFilter(b"PQ2A.190405", b"PQ2A.190406") + ) for row in rows: print_row(row) @@ -150,7 +153,8 @@ def filter_limit_value_regex(project_id, instance_id, table_id): table = instance.table(table_id) rows = table.read_rows( - filter_=row_filters.ValueRegexFilter("PQ2A.*$".encode("utf-8"))) + filter_=row_filters.ValueRegexFilter("PQ2A.*$".encode("utf-8")) + ) for row in rows: print_row(row) @@ -165,8 +169,8 @@ def filter_limit_timestamp_range(project_id, instance_id, table_id): end = datetime.datetime(2019, 5, 1) rows = table.read_rows( - filter_=row_filters.TimestampRangeFilter( - 
row_filters.TimestampRange(end=end))) + filter_=row_filters.TimestampRangeFilter(row_filters.TimestampRange(end=end)) + ) for row in rows: print_row(row) @@ -202,8 +206,7 @@ def filter_modify_strip_value(project_id, instance_id, table_id): instance = client.instance(instance_id) table = instance.table(table_id) - rows = table.read_rows( - filter_=row_filters.StripValueTransformerFilter(True)) + rows = table.read_rows(filter_=row_filters.StripValueTransformerFilter(True)) for row in rows: print_row(row) @@ -215,8 +218,7 @@ def filter_modify_apply_label(project_id, instance_id, table_id): instance = client.instance(instance_id) table = instance.table(table_id) - rows = table.read_rows( - filter_=row_filters.ApplyLabelFilter(label="labelled")) + rows = table.read_rows(filter_=row_filters.ApplyLabelFilter(label="labelled")) for row in rows: print_row(row) @@ -228,9 +230,14 @@ def filter_composing_chain(project_id, instance_id, table_id): instance = client.instance(instance_id) table = instance.table(table_id) - rows = table.read_rows(filter_=row_filters.RowFilterChain( - filters=[row_filters.CellsColumnLimitFilter(1), - row_filters.FamilyNameRegexFilter("cell_plan")])) + rows = table.read_rows( + filter_=row_filters.RowFilterChain( + filters=[ + row_filters.CellsColumnLimitFilter(1), + row_filters.FamilyNameRegexFilter("cell_plan"), + ] + ) + ) for row in rows: print_row(row) @@ -242,9 +249,14 @@ def filter_composing_interleave(project_id, instance_id, table_id): instance = client.instance(instance_id) table = instance.table(table_id) - rows = table.read_rows(filter_=row_filters.RowFilterUnion( - filters=[row_filters.ValueRegexFilter("true"), - row_filters.ColumnQualifierRegexFilter("os_build")])) + rows = table.read_rows( + filter_=row_filters.RowFilterUnion( + filters=[ + row_filters.ValueRegexFilter("true"), + row_filters.ColumnQualifierRegexFilter("os_build"), + ] + ) + ) for row in rows: print_row(row) @@ -256,16 +268,18 @@ def filter_composing_condition(project_id, instance_id, table_id): instance = client.instance(instance_id) table = instance.table(table_id) - rows = table.read_rows(filter_=row_filters.ConditionalRowFilter( - base_filter=row_filters.RowFilterChain(filters=[ - row_filters.ColumnQualifierRegexFilter( - "data_plan_10gb"), - row_filters.ValueRegexFilter( - "true")]), - true_filter=row_filters.ApplyLabelFilter(label="passed-filter"), - false_filter=row_filters.ApplyLabelFilter(label="filtered-out") - - )) + rows = table.read_rows( + filter_=row_filters.ConditionalRowFilter( + base_filter=row_filters.RowFilterChain( + filters=[ + row_filters.ColumnQualifierRegexFilter("data_plan_10gb"), + row_filters.ValueRegexFilter("true"), + ] + ), + true_filter=row_filters.ApplyLabelFilter(label="passed-filter"), + false_filter=row_filters.ApplyLabelFilter(label="filtered-out"), + ) + ) for row in rows: print_row(row) @@ -275,16 +289,23 @@ def filter_composing_condition(project_id, instance_id, table_id): def print_row(row): - print("Reading data for {}:".format(row.row_key.decode('utf-8'))) + print("Reading data for {}:".format(row.row_key.decode("utf-8"))) for cf, cols in sorted(row.cells.items()): print("Column Family {}".format(cf)) for col, cells in sorted(cols.items()): for cell in cells: - labels = " [{}]".format(",".join(cell.labels)) \ - if len(cell.labels) else "" + labels = ( + " [{}]".format(",".join(cell.labels)) if len(cell.labels) else "" + ) print( - "\t{}: {} @{}{}".format(col.decode('utf-8'), - cell.value.decode('utf-8'), - cell.timestamp, labels)) + "\t{}: {} 
@{}{}".format( + col.decode("utf-8"), + cell.value.decode("utf-8"), + cell.timestamp, + labels, + ) + ) print("") + + # [END bigtable_filters_print] diff --git a/samples/snippets/filters/filters_test.py b/samples/snippets/filters/filters_test.py index 36dc4a5b1..35cf62ff0 100644 --- a/samples/snippets/filters/filters_test.py +++ b/samples/snippets/filters/filters_test.py @@ -23,9 +23,9 @@ import filter_snippets -PROJECT = os.environ['GOOGLE_CLOUD_PROJECT'] -BIGTABLE_INSTANCE = os.environ['BIGTABLE_INSTANCE'] -TABLE_ID_PREFIX = 'mobile-time-series-{}' +PROJECT = os.environ["GOOGLE_CLOUD_PROJECT"] +BIGTABLE_INSTANCE = os.environ["BIGTABLE_INSTANCE"] +TABLE_ID_PREFIX = "mobile-time-series-{}" @pytest.fixture(scope="module", autouse=True) @@ -40,11 +40,10 @@ def table_id(): if table.exists(): table.delete() - table.create(column_families={'stats_summary': None, 'cell_plan': None}) + table.create(column_families={"stats_summary": None, "cell_plan": None}) timestamp = datetime.datetime(2019, 5, 1) - timestamp_minus_hr = datetime.datetime(2019, 5, 1) - datetime.timedelta( - hours=1) + timestamp_minus_hr = datetime.datetime(2019, 5, 1) - datetime.timedelta(hours=1) row_keys = [ "phone#4c410523#20190501", @@ -99,98 +98,88 @@ def table_id(): def test_filter_limit_row_sample(capsys, snapshot, table_id): - filter_snippets.filter_limit_row_sample(PROJECT, BIGTABLE_INSTANCE, - table_id) + filter_snippets.filter_limit_row_sample(PROJECT, BIGTABLE_INSTANCE, table_id) out, _ = capsys.readouterr() - assert 'Reading data for' in out + assert "Reading data for" in out def test_filter_limit_row_regex(capsys, snapshot, table_id): - filter_snippets.filter_limit_row_regex(PROJECT, BIGTABLE_INSTANCE, - table_id) + filter_snippets.filter_limit_row_regex(PROJECT, BIGTABLE_INSTANCE, table_id) out, _ = capsys.readouterr() snapshot.assert_match(out) def test_filter_limit_cells_per_col(capsys, snapshot, table_id): - filter_snippets.filter_limit_cells_per_col(PROJECT, BIGTABLE_INSTANCE, - table_id) + filter_snippets.filter_limit_cells_per_col(PROJECT, BIGTABLE_INSTANCE, table_id) out, _ = capsys.readouterr() snapshot.assert_match(out) def test_filter_limit_cells_per_row(capsys, snapshot, table_id): - filter_snippets.filter_limit_cells_per_row(PROJECT, BIGTABLE_INSTANCE, - table_id) + filter_snippets.filter_limit_cells_per_row(PROJECT, BIGTABLE_INSTANCE, table_id) out, _ = capsys.readouterr() snapshot.assert_match(out) def test_filter_limit_cells_per_row_offset(capsys, snapshot, table_id): - filter_snippets.filter_limit_cells_per_row_offset(PROJECT, - BIGTABLE_INSTANCE, - table_id) + filter_snippets.filter_limit_cells_per_row_offset( + PROJECT, BIGTABLE_INSTANCE, table_id + ) out, _ = capsys.readouterr() snapshot.assert_match(out) def test_filter_limit_col_family_regex(capsys, snapshot, table_id): - filter_snippets.filter_limit_col_family_regex(PROJECT, BIGTABLE_INSTANCE, - table_id) + filter_snippets.filter_limit_col_family_regex(PROJECT, BIGTABLE_INSTANCE, table_id) out, _ = capsys.readouterr() snapshot.assert_match(out) def test_filter_limit_col_qualifier_regex(capsys, snapshot, table_id): - filter_snippets.filter_limit_col_qualifier_regex(PROJECT, - BIGTABLE_INSTANCE, - table_id) + filter_snippets.filter_limit_col_qualifier_regex( + PROJECT, BIGTABLE_INSTANCE, table_id + ) out, _ = capsys.readouterr() snapshot.assert_match(out) def test_filter_limit_col_range(capsys, snapshot, table_id): - filter_snippets.filter_limit_col_range(PROJECT, BIGTABLE_INSTANCE, - table_id) + filter_snippets.filter_limit_col_range(PROJECT, 
BIGTABLE_INSTANCE, table_id) out, _ = capsys.readouterr() snapshot.assert_match(out) def test_filter_limit_value_range(capsys, snapshot, table_id): - filter_snippets.filter_limit_value_range(PROJECT, BIGTABLE_INSTANCE, - table_id) + filter_snippets.filter_limit_value_range(PROJECT, BIGTABLE_INSTANCE, table_id) out, _ = capsys.readouterr() snapshot.assert_match(out) def test_filter_limit_value_regex(capsys, snapshot, table_id): - filter_snippets.filter_limit_value_regex(PROJECT, BIGTABLE_INSTANCE, - table_id) + filter_snippets.filter_limit_value_regex(PROJECT, BIGTABLE_INSTANCE, table_id) out, _ = capsys.readouterr() snapshot.assert_match(out) def test_filter_limit_timestamp_range(capsys, snapshot, table_id): - filter_snippets.filter_limit_timestamp_range(PROJECT, BIGTABLE_INSTANCE, - table_id) + filter_snippets.filter_limit_timestamp_range(PROJECT, BIGTABLE_INSTANCE, table_id) out, _ = capsys.readouterr() snapshot.assert_match(out) def test_filter_limit_block_all(capsys, snapshot, table_id): - filter_snippets.filter_limit_block_all(PROJECT, BIGTABLE_INSTANCE, - table_id) + filter_snippets.filter_limit_block_all(PROJECT, BIGTABLE_INSTANCE, table_id) out, _ = capsys.readouterr() snapshot.assert_match(out) @@ -204,40 +193,35 @@ def test_filter_limit_pass_all(capsys, snapshot, table_id): def test_filter_modify_strip_value(capsys, snapshot, table_id): - filter_snippets.filter_modify_strip_value(PROJECT, BIGTABLE_INSTANCE, - table_id) + filter_snippets.filter_modify_strip_value(PROJECT, BIGTABLE_INSTANCE, table_id) out, _ = capsys.readouterr() snapshot.assert_match(out) def test_filter_modify_apply_label(capsys, snapshot, table_id): - filter_snippets.filter_modify_apply_label(PROJECT, BIGTABLE_INSTANCE, - table_id) + filter_snippets.filter_modify_apply_label(PROJECT, BIGTABLE_INSTANCE, table_id) out, _ = capsys.readouterr() snapshot.assert_match(out) def test_filter_composing_chain(capsys, snapshot, table_id): - filter_snippets.filter_composing_chain(PROJECT, BIGTABLE_INSTANCE, - table_id) + filter_snippets.filter_composing_chain(PROJECT, BIGTABLE_INSTANCE, table_id) out, _ = capsys.readouterr() snapshot.assert_match(out) def test_filter_composing_interleave(capsys, snapshot, table_id): - filter_snippets.filter_composing_interleave(PROJECT, BIGTABLE_INSTANCE, - table_id) + filter_snippets.filter_composing_interleave(PROJECT, BIGTABLE_INSTANCE, table_id) out, _ = capsys.readouterr() snapshot.assert_match(out) def test_filter_composing_condition(capsys, snapshot, table_id): - filter_snippets.filter_composing_condition(PROJECT, BIGTABLE_INSTANCE, - table_id) + filter_snippets.filter_composing_condition(PROJECT, BIGTABLE_INSTANCE, table_id) out, _ = capsys.readouterr() snapshot.assert_match(out) diff --git a/samples/snippets/filters/noxfile.py b/samples/snippets/filters/noxfile.py index ba55d7ce5..93a9122cc 100644 --- a/samples/snippets/filters/noxfile.py +++ b/samples/snippets/filters/noxfile.py @@ -17,6 +17,7 @@ import os from pathlib import Path import sys +from typing import Callable, Dict, List, Optional import nox @@ -27,8 +28,9 @@ # WARNING - WARNING - WARNING - WARNING - WARNING # WARNING - WARNING - WARNING - WARNING - WARNING -# Copy `noxfile_config.py` to your directory and modify it instead. +BLACK_VERSION = "black==19.10b0" +# Copy `noxfile_config.py` to your directory and modify it instead. # `TEST_CONFIG` dict is a configuration hook that allows users to # modify the test configurations. 
The values here should be in sync @@ -37,24 +39,29 @@ TEST_CONFIG = { # You can opt out from the test for specific Python versions. - 'ignored_versions': ["2.7"], - + "ignored_versions": [], + # Old samples are opted out of enforcing Python type hints + # All new samples should feature them + "enforce_type_hints": False, # An envvar key for determining the project id to use. Change it # to 'BUILD_SPECIFIC_GCLOUD_PROJECT' if you want to opt in using a # build specific Cloud project. You can also use your own string # to use your own Cloud project. - 'gcloud_project_env': 'GOOGLE_CLOUD_PROJECT', + "gcloud_project_env": "GOOGLE_CLOUD_PROJECT", # 'gcloud_project_env': 'BUILD_SPECIFIC_GCLOUD_PROJECT', - + # If you need to use a specific version of pip, + # change pip_version_override to the string representation + # of the version number, for example, "20.2.4" + "pip_version_override": None, # A dictionary you want to inject into your test. Don't put any # secrets here. These values will override predefined values. - 'envs': {}, + "envs": {}, } try: # Ensure we can import noxfile_config in the project's directory. - sys.path.append('.') + sys.path.append(".") from noxfile_config import TEST_CONFIG_OVERRIDE except ImportError as e: print("No user noxfile_config found: detail: {}".format(e)) @@ -64,36 +71,43 @@ TEST_CONFIG.update(TEST_CONFIG_OVERRIDE) -def get_pytest_env_vars(): +def get_pytest_env_vars() -> Dict[str, str]: """Returns a dict for pytest invocation.""" ret = {} # Override the GCLOUD_PROJECT and the alias. - env_key = TEST_CONFIG['gcloud_project_env'] + env_key = TEST_CONFIG["gcloud_project_env"] # This should error out if not set. - ret['GOOGLE_CLOUD_PROJECT'] = os.environ[env_key] + ret["GOOGLE_CLOUD_PROJECT"] = os.environ[env_key] # Apply user supplied envs. - ret.update(TEST_CONFIG['envs']) + ret.update(TEST_CONFIG["envs"]) return ret # DO NOT EDIT - automatically generated. -# All versions used to tested samples. -ALL_VERSIONS = ["2.7", "3.6", "3.7", "3.8"] +# All versions used to test samples. +ALL_VERSIONS = ["3.6", "3.7", "3.8", "3.9", "3.10"] # Any default versions that should be ignored. -IGNORED_VERSIONS = TEST_CONFIG['ignored_versions'] +IGNORED_VERSIONS = TEST_CONFIG["ignored_versions"] TESTED_VERSIONS = sorted([v for v in ALL_VERSIONS if v not in IGNORED_VERSIONS]) -INSTALL_LIBRARY_FROM_SOURCE = bool(os.environ.get("INSTALL_LIBRARY_FROM_SOURCE", False)) +INSTALL_LIBRARY_FROM_SOURCE = os.environ.get("INSTALL_LIBRARY_FROM_SOURCE", False) in ( + "True", + "true", +) + +# Error if a python version is missing +nox.options.error_on_missing_interpreters = True + # # Style Checks # -def _determine_local_import_names(start_dir): +def _determine_local_import_names(start_dir: str) -> List[str]: """Determines all import names that should be considered "local". This is used when running the linter to insure that import order is @@ -131,18 +145,34 @@ def _determine_local_import_names(start_dir): @nox.session -def lint(session): - session.install("flake8", "flake8-import-order") +def lint(session: nox.sessions.Session) -> None: + if not TEST_CONFIG["enforce_type_hints"]: + session.install("flake8", "flake8-import-order") + else: + session.install("flake8", "flake8-import-order", "flake8-annotations") local_names = _determine_local_import_names(".") args = FLAKE8_COMMON_ARGS + [ "--application-import-names", ",".join(local_names), - "." 
+ ".", ] session.run("flake8", *args) +# +# Black +# + + +@nox.session +def blacken(session: nox.sessions.Session) -> None: + session.install(BLACK_VERSION) + python_files = [path for path in os.listdir(".") if path.endswith(".py")] + + session.run("black", *python_files) + + # # Sample Tests # @@ -151,13 +181,24 @@ def lint(session): PYTEST_COMMON_ARGS = ["--junitxml=sponge_log.xml"] -def _session_tests(session, post_install=None): +def _session_tests( + session: nox.sessions.Session, post_install: Callable = None +) -> None: + if TEST_CONFIG["pip_version_override"]: + pip_version = TEST_CONFIG["pip_version_override"] + session.install(f"pip=={pip_version}") """Runs py.test for a particular project.""" if os.path.exists("requirements.txt"): - session.install("-r", "requirements.txt") + if os.path.exists("constraints.txt"): + session.install("-r", "requirements.txt", "-c", "constraints.txt") + else: + session.install("-r", "requirements.txt") if os.path.exists("requirements-test.txt"): - session.install("-r", "requirements-test.txt") + if os.path.exists("constraints-test.txt"): + session.install("-r", "requirements-test.txt", "-c", "constraints-test.txt") + else: + session.install("-r", "requirements-test.txt") if INSTALL_LIBRARY_FROM_SOURCE: session.install("-e", _get_repo_root()) @@ -172,19 +213,19 @@ def _session_tests(session, post_install=None): # on travis where slow and flaky tests are excluded. # See http://doc.pytest.org/en/latest/_modules/_pytest/main.html success_codes=[0, 5], - env=get_pytest_env_vars() + env=get_pytest_env_vars(), ) @nox.session(python=ALL_VERSIONS) -def py(session): +def py(session: nox.sessions.Session) -> None: """Runs py.test for a sample using the specified version of Python.""" if session.python in TESTED_VERSIONS: _session_tests(session) else: - session.skip("SKIPPED: {} tests are disabled for this sample.".format( - session.python - )) + session.skip( + "SKIPPED: {} tests are disabled for this sample.".format(session.python) + ) # @@ -192,7 +233,7 @@ def py(session): # -def _get_repo_root(): +def _get_repo_root() -> Optional[str]: """ Returns the root folder of the project. """ # Get root of this repository. Assume we don't have directories nested deeper than 10 items. p = Path(os.getcwd()) @@ -201,6 +242,11 @@ def _get_repo_root(): break if Path(p / ".git").exists(): return str(p) + # .git is not available in repos cloned via Cloud Build + # setup.py is always in the library's root, so use that instead + # https://github.com/googleapis/synthtool/issues/792 + if Path(p / "setup.py").exists(): + return str(p) p = p.parent raise Exception("Unable to detect repository root.") @@ -210,7 +256,7 @@ def _get_repo_root(): @nox.session @nox.parametrize("path", GENERATED_READMES) -def readmegen(session, path): +def readmegen(session: nox.sessions.Session, path: str) -> None: """(Re-)generates the readme for a sample.""" session.install("jinja2", "pyyaml") dir_ = os.path.dirname(path) diff --git a/samples/snippets/reads/noxfile.py b/samples/snippets/reads/noxfile.py index ba55d7ce5..93a9122cc 100644 --- a/samples/snippets/reads/noxfile.py +++ b/samples/snippets/reads/noxfile.py @@ -17,6 +17,7 @@ import os from pathlib import Path import sys +from typing import Callable, Dict, List, Optional import nox @@ -27,8 +28,9 @@ # WARNING - WARNING - WARNING - WARNING - WARNING # WARNING - WARNING - WARNING - WARNING - WARNING -# Copy `noxfile_config.py` to your directory and modify it instead. 
+BLACK_VERSION = "black==19.10b0" +# Copy `noxfile_config.py` to your directory and modify it instead. # `TEST_CONFIG` dict is a configuration hook that allows users to # modify the test configurations. The values here should be in sync @@ -37,24 +39,29 @@ TEST_CONFIG = { # You can opt out from the test for specific Python versions. - 'ignored_versions': ["2.7"], - + "ignored_versions": [], + # Old samples are opted out of enforcing Python type hints + # All new samples should feature them + "enforce_type_hints": False, # An envvar key for determining the project id to use. Change it # to 'BUILD_SPECIFIC_GCLOUD_PROJECT' if you want to opt in using a # build specific Cloud project. You can also use your own string # to use your own Cloud project. - 'gcloud_project_env': 'GOOGLE_CLOUD_PROJECT', + "gcloud_project_env": "GOOGLE_CLOUD_PROJECT", # 'gcloud_project_env': 'BUILD_SPECIFIC_GCLOUD_PROJECT', - + # If you need to use a specific version of pip, + # change pip_version_override to the string representation + # of the version number, for example, "20.2.4" + "pip_version_override": None, # A dictionary you want to inject into your test. Don't put any # secrets here. These values will override predefined values. - 'envs': {}, + "envs": {}, } try: # Ensure we can import noxfile_config in the project's directory. - sys.path.append('.') + sys.path.append(".") from noxfile_config import TEST_CONFIG_OVERRIDE except ImportError as e: print("No user noxfile_config found: detail: {}".format(e)) @@ -64,36 +71,43 @@ TEST_CONFIG.update(TEST_CONFIG_OVERRIDE) -def get_pytest_env_vars(): +def get_pytest_env_vars() -> Dict[str, str]: """Returns a dict for pytest invocation.""" ret = {} # Override the GCLOUD_PROJECT and the alias. - env_key = TEST_CONFIG['gcloud_project_env'] + env_key = TEST_CONFIG["gcloud_project_env"] # This should error out if not set. - ret['GOOGLE_CLOUD_PROJECT'] = os.environ[env_key] + ret["GOOGLE_CLOUD_PROJECT"] = os.environ[env_key] # Apply user supplied envs. - ret.update(TEST_CONFIG['envs']) + ret.update(TEST_CONFIG["envs"]) return ret # DO NOT EDIT - automatically generated. -# All versions used to tested samples. -ALL_VERSIONS = ["2.7", "3.6", "3.7", "3.8"] +# All versions used to test samples. +ALL_VERSIONS = ["3.6", "3.7", "3.8", "3.9", "3.10"] # Any default versions that should be ignored. -IGNORED_VERSIONS = TEST_CONFIG['ignored_versions'] +IGNORED_VERSIONS = TEST_CONFIG["ignored_versions"] TESTED_VERSIONS = sorted([v for v in ALL_VERSIONS if v not in IGNORED_VERSIONS]) -INSTALL_LIBRARY_FROM_SOURCE = bool(os.environ.get("INSTALL_LIBRARY_FROM_SOURCE", False)) +INSTALL_LIBRARY_FROM_SOURCE = os.environ.get("INSTALL_LIBRARY_FROM_SOURCE", False) in ( + "True", + "true", +) + +# Error if a python version is missing +nox.options.error_on_missing_interpreters = True + # # Style Checks # -def _determine_local_import_names(start_dir): +def _determine_local_import_names(start_dir: str) -> List[str]: """Determines all import names that should be considered "local". 
This is used when running the linter to insure that import order is @@ -131,18 +145,34 @@ def _determine_local_import_names(start_dir): @nox.session -def lint(session): - session.install("flake8", "flake8-import-order") +def lint(session: nox.sessions.Session) -> None: + if not TEST_CONFIG["enforce_type_hints"]: + session.install("flake8", "flake8-import-order") + else: + session.install("flake8", "flake8-import-order", "flake8-annotations") local_names = _determine_local_import_names(".") args = FLAKE8_COMMON_ARGS + [ "--application-import-names", ",".join(local_names), - "." + ".", ] session.run("flake8", *args) +# +# Black +# + + +@nox.session +def blacken(session: nox.sessions.Session) -> None: + session.install(BLACK_VERSION) + python_files = [path for path in os.listdir(".") if path.endswith(".py")] + + session.run("black", *python_files) + + # # Sample Tests # @@ -151,13 +181,24 @@ def lint(session): PYTEST_COMMON_ARGS = ["--junitxml=sponge_log.xml"] -def _session_tests(session, post_install=None): +def _session_tests( + session: nox.sessions.Session, post_install: Callable = None +) -> None: + if TEST_CONFIG["pip_version_override"]: + pip_version = TEST_CONFIG["pip_version_override"] + session.install(f"pip=={pip_version}") """Runs py.test for a particular project.""" if os.path.exists("requirements.txt"): - session.install("-r", "requirements.txt") + if os.path.exists("constraints.txt"): + session.install("-r", "requirements.txt", "-c", "constraints.txt") + else: + session.install("-r", "requirements.txt") if os.path.exists("requirements-test.txt"): - session.install("-r", "requirements-test.txt") + if os.path.exists("constraints-test.txt"): + session.install("-r", "requirements-test.txt", "-c", "constraints-test.txt") + else: + session.install("-r", "requirements-test.txt") if INSTALL_LIBRARY_FROM_SOURCE: session.install("-e", _get_repo_root()) @@ -172,19 +213,19 @@ def _session_tests(session, post_install=None): # on travis where slow and flaky tests are excluded. # See http://doc.pytest.org/en/latest/_modules/_pytest/main.html success_codes=[0, 5], - env=get_pytest_env_vars() + env=get_pytest_env_vars(), ) @nox.session(python=ALL_VERSIONS) -def py(session): +def py(session: nox.sessions.Session) -> None: """Runs py.test for a sample using the specified version of Python.""" if session.python in TESTED_VERSIONS: _session_tests(session) else: - session.skip("SKIPPED: {} tests are disabled for this sample.".format( - session.python - )) + session.skip( + "SKIPPED: {} tests are disabled for this sample.".format(session.python) + ) # @@ -192,7 +233,7 @@ def py(session): # -def _get_repo_root(): +def _get_repo_root() -> Optional[str]: """ Returns the root folder of the project. """ # Get root of this repository. Assume we don't have directories nested deeper than 10 items. 
p = Path(os.getcwd()) @@ -201,6 +242,11 @@ def _get_repo_root(): break if Path(p / ".git").exists(): return str(p) + # .git is not available in repos cloned via Cloud Build + # setup.py is always in the library's root, so use that instead + # https://github.com/googleapis/synthtool/issues/792 + if Path(p / "setup.py").exists(): + return str(p) p = p.parent raise Exception("Unable to detect repository root.") @@ -210,7 +256,7 @@ def _get_repo_root(): @nox.session @nox.parametrize("path", GENERATED_READMES) -def readmegen(session, path): +def readmegen(session: nox.sessions.Session, path: str) -> None: """(Re-)generates the readme for a sample.""" session.install("jinja2", "pyyaml") dir_ = os.path.dirname(path) diff --git a/samples/snippets/reads/read_snippets.py b/samples/snippets/reads/read_snippets.py index 6936b4c64..afd0955b8 100644 --- a/samples/snippets/reads/read_snippets.py +++ b/samples/snippets/reads/read_snippets.py @@ -43,7 +43,7 @@ def read_row_partial(project_id, instance_id, table_id): table = instance.table(table_id) row_key = "phone#4c410523#20190501" - col_filter = row_filters.ColumnQualifierRegexFilter(b'os_build') + col_filter = row_filters.ColumnQualifierRegexFilter(b"os_build") row = table.read_row(row_key, filter_=col_filter) print_row(row) @@ -74,8 +74,8 @@ def read_row_range(project_id, instance_id, table_id): row_set = RowSet() row_set.add_row_range_from_keys( - start_key=b"phone#4c410523#20190501", - end_key=b"phone#4c410523#201906201") + start_key=b"phone#4c410523#20190501", end_key=b"phone#4c410523#201906201" + ) rows = table.read_rows(row_set=row_set) for row in rows: @@ -91,11 +91,11 @@ def read_row_ranges(project_id, instance_id, table_id): row_set = RowSet() row_set.add_row_range_from_keys( - start_key=b"phone#4c410523#20190501", - end_key=b"phone#4c410523#201906201") + start_key=b"phone#4c410523#20190501", end_key=b"phone#4c410523#201906201" + ) row_set.add_row_range_from_keys( - start_key=b"phone#5c10102#20190501", - end_key=b"phone#5c10102#201906201") + start_key=b"phone#5c10102#20190501", end_key=b"phone#5c10102#201906201" + ) rows = table.read_rows(row_set=row_set) for row in rows: @@ -112,8 +112,7 @@ def read_prefix(project_id, instance_id, table_id): end_key = prefix[:-1] + chr(ord(prefix[-1]) + 1) row_set = RowSet() - row_set.add_row_range_from_keys(prefix.encode("utf-8"), - end_key.encode("utf-8")) + row_set.add_row_range_from_keys(prefix.encode("utf-8"), end_key.encode("utf-8")) rows = table.read_rows(row_set=row_set) for row in rows: @@ -137,16 +136,23 @@ def read_filter(project_id, instance_id, table_id): def print_row(row): - print("Reading data for {}:".format(row.row_key.decode('utf-8'))) + print("Reading data for {}:".format(row.row_key.decode("utf-8"))) for cf, cols in sorted(row.cells.items()): print("Column Family {}".format(cf)) for col, cells in sorted(cols.items()): for cell in cells: - labels = " [{}]".format(",".join(cell.labels)) \ - if len(cell.labels) else "" + labels = ( + " [{}]".format(",".join(cell.labels)) if len(cell.labels) else "" + ) print( - "\t{}: {} @{}{}".format(col.decode('utf-8'), - cell.value.decode('utf-8'), - cell.timestamp, labels)) + "\t{}: {} @{}{}".format( + col.decode("utf-8"), + cell.value.decode("utf-8"), + cell.timestamp, + labels, + ) + ) print("") + + # [END bigtable_reads_print] diff --git a/samples/snippets/reads/reads_test.py b/samples/snippets/reads/reads_test.py index fc3421000..0b61e341f 100644 --- a/samples/snippets/reads/reads_test.py +++ b/samples/snippets/reads/reads_test.py @@ -21,9 +21,9 @@ 
import read_snippets -PROJECT = os.environ['GOOGLE_CLOUD_PROJECT'] -BIGTABLE_INSTANCE = os.environ['BIGTABLE_INSTANCE'] -TABLE_ID_PREFIX = 'mobile-time-series-{}' +PROJECT = os.environ["GOOGLE_CLOUD_PROJECT"] +BIGTABLE_INSTANCE = os.environ["BIGTABLE_INSTANCE"] +TABLE_ID_PREFIX = "mobile-time-series-{}" @pytest.fixture(scope="module", autouse=True) @@ -36,7 +36,7 @@ def table_id(): if table.exists(): table.delete() - table.create(column_families={'stats_summary': None}) + table.create(column_families={"stats_summary": None}) # table = instance.table(table_id) diff --git a/samples/snippets/writes/noxfile.py b/samples/snippets/writes/noxfile.py index ba55d7ce5..93a9122cc 100644 --- a/samples/snippets/writes/noxfile.py +++ b/samples/snippets/writes/noxfile.py @@ -17,6 +17,7 @@ import os from pathlib import Path import sys +from typing import Callable, Dict, List, Optional import nox @@ -27,8 +28,9 @@ # WARNING - WARNING - WARNING - WARNING - WARNING # WARNING - WARNING - WARNING - WARNING - WARNING -# Copy `noxfile_config.py` to your directory and modify it instead. +BLACK_VERSION = "black==19.10b0" +# Copy `noxfile_config.py` to your directory and modify it instead. # `TEST_CONFIG` dict is a configuration hook that allows users to # modify the test configurations. The values here should be in sync @@ -37,24 +39,29 @@ TEST_CONFIG = { # You can opt out from the test for specific Python versions. - 'ignored_versions': ["2.7"], - + "ignored_versions": [], + # Old samples are opted out of enforcing Python type hints + # All new samples should feature them + "enforce_type_hints": False, # An envvar key for determining the project id to use. Change it # to 'BUILD_SPECIFIC_GCLOUD_PROJECT' if you want to opt in using a # build specific Cloud project. You can also use your own string # to use your own Cloud project. - 'gcloud_project_env': 'GOOGLE_CLOUD_PROJECT', + "gcloud_project_env": "GOOGLE_CLOUD_PROJECT", # 'gcloud_project_env': 'BUILD_SPECIFIC_GCLOUD_PROJECT', - + # If you need to use a specific version of pip, + # change pip_version_override to the string representation + # of the version number, for example, "20.2.4" + "pip_version_override": None, # A dictionary you want to inject into your test. Don't put any # secrets here. These values will override predefined values. - 'envs': {}, + "envs": {}, } try: # Ensure we can import noxfile_config in the project's directory. - sys.path.append('.') + sys.path.append(".") from noxfile_config import TEST_CONFIG_OVERRIDE except ImportError as e: print("No user noxfile_config found: detail: {}".format(e)) @@ -64,36 +71,43 @@ TEST_CONFIG.update(TEST_CONFIG_OVERRIDE) -def get_pytest_env_vars(): +def get_pytest_env_vars() -> Dict[str, str]: """Returns a dict for pytest invocation.""" ret = {} # Override the GCLOUD_PROJECT and the alias. - env_key = TEST_CONFIG['gcloud_project_env'] + env_key = TEST_CONFIG["gcloud_project_env"] # This should error out if not set. - ret['GOOGLE_CLOUD_PROJECT'] = os.environ[env_key] + ret["GOOGLE_CLOUD_PROJECT"] = os.environ[env_key] # Apply user supplied envs. - ret.update(TEST_CONFIG['envs']) + ret.update(TEST_CONFIG["envs"]) return ret # DO NOT EDIT - automatically generated. -# All versions used to tested samples. -ALL_VERSIONS = ["2.7", "3.6", "3.7", "3.8"] +# All versions used to test samples. +ALL_VERSIONS = ["3.6", "3.7", "3.8", "3.9", "3.10"] # Any default versions that should be ignored. 
-IGNORED_VERSIONS = TEST_CONFIG['ignored_versions'] +IGNORED_VERSIONS = TEST_CONFIG["ignored_versions"] TESTED_VERSIONS = sorted([v for v in ALL_VERSIONS if v not in IGNORED_VERSIONS]) -INSTALL_LIBRARY_FROM_SOURCE = bool(os.environ.get("INSTALL_LIBRARY_FROM_SOURCE", False)) +INSTALL_LIBRARY_FROM_SOURCE = os.environ.get("INSTALL_LIBRARY_FROM_SOURCE", False) in ( + "True", + "true", +) + +# Error if a python version is missing +nox.options.error_on_missing_interpreters = True + # # Style Checks # -def _determine_local_import_names(start_dir): +def _determine_local_import_names(start_dir: str) -> List[str]: """Determines all import names that should be considered "local". This is used when running the linter to ensure that import order is @@ -131,18 +145,34 @@ def _determine_local_import_names(start_dir): @nox.session -def lint(session): - session.install("flake8", "flake8-import-order") +def lint(session: nox.sessions.Session) -> None: + if not TEST_CONFIG["enforce_type_hints"]: + session.install("flake8", "flake8-import-order") + else: + session.install("flake8", "flake8-import-order", "flake8-annotations") local_names = _determine_local_import_names(".") args = FLAKE8_COMMON_ARGS + [ "--application-import-names", ",".join(local_names), - "." + ".", ] session.run("flake8", *args) +# +# Black +# + + +@nox.session +def blacken(session: nox.sessions.Session) -> None: + session.install(BLACK_VERSION) + python_files = [path for path in os.listdir(".") if path.endswith(".py")] + + session.run("black", *python_files) + + # # Sample Tests # @@ -151,13 +181,24 @@ def lint(session): PYTEST_COMMON_ARGS = ["--junitxml=sponge_log.xml"] -def _session_tests(session, post_install=None): +def _session_tests( + session: nox.sessions.Session, post_install: Callable = None +) -> None: """Runs py.test for a particular project.""" + if TEST_CONFIG["pip_version_override"]: + pip_version = TEST_CONFIG["pip_version_override"] + session.install(f"pip=={pip_version}") if os.path.exists("requirements.txt"): - session.install("-r", "requirements.txt") + if os.path.exists("constraints.txt"): + session.install("-r", "requirements.txt", "-c", "constraints.txt") + else: + session.install("-r", "requirements.txt") if os.path.exists("requirements-test.txt"): - session.install("-r", "requirements-test.txt") + if os.path.exists("constraints-test.txt"): + session.install("-r", "requirements-test.txt", "-c", "constraints-test.txt") + else: + session.install("-r", "requirements-test.txt") if INSTALL_LIBRARY_FROM_SOURCE: session.install("-e", _get_repo_root()) @@ -172,19 +213,19 @@ def _session_tests(session, post_install=None): # on travis where slow and flaky tests are excluded. # See http://doc.pytest.org/en/latest/_modules/_pytest/main.html success_codes=[0, 5], - env=get_pytest_env_vars() + env=get_pytest_env_vars(), ) @nox.session(python=ALL_VERSIONS) -def py(session): +def py(session: nox.sessions.Session) -> None: """Runs py.test for a sample using the specified version of Python.""" if session.python in TESTED_VERSIONS: _session_tests(session) else: - session.skip("SKIPPED: {} tests are disabled for this sample.".format( - session.python - )) + session.skip( + "SKIPPED: {} tests are disabled for this sample.".format(session.python) + ) # @@ -192,7 +233,7 @@ def py(session): # -def _get_repo_root(): +def _get_repo_root() -> Optional[str]: """ Returns the root folder of the project. """ # Get root of this repository. Assume we don't have directories nested deeper than 10 items.
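# Walk upward from the current working directory, returning the first ancestor that contains a root marker.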
p = Path(os.getcwd()) @@ -201,6 +242,11 @@ def _get_repo_root(): break if Path(p / ".git").exists(): return str(p) + # .git is not available in repos cloned via Cloud Build + # setup.py is always in the library's root, so use that instead + # https://github.com/googleapis/synthtool/issues/792 + if Path(p / "setup.py").exists(): + return str(p) p = p.parent raise Exception("Unable to detect repository root.") @@ -210,7 +256,7 @@ def _get_repo_root(): @nox.session @nox.parametrize("path", GENERATED_READMES) -def readmegen(session, path): +def readmegen(session: nox.sessions.Session, path: str) -> None: """(Re-)generates the readme for a sample.""" session.install("jinja2", "pyyaml") dir_ = os.path.dirname(path) diff --git a/samples/snippets/writes/write_batch.py b/samples/snippets/writes/write_batch.py index ecc8f273b..fd5117242 100644 --- a/samples/snippets/writes/write_batch.py +++ b/samples/snippets/writes/write_batch.py @@ -26,30 +26,22 @@ def write_batch(project_id, instance_id, table_id): timestamp = datetime.datetime.utcnow() column_family_id = "stats_summary" - rows = [table.direct_row("tablet#a0b81f74#20190501"), - table.direct_row("tablet#a0b81f74#20190502")] - - rows[0].set_cell(column_family_id, - "connected_wifi", - 1, - timestamp) - rows[0].set_cell(column_family_id, - "os_build", - "12155.0.0-rc1", - timestamp) - rows[1].set_cell(column_family_id, - "connected_wifi", - 1, - timestamp) - rows[1].set_cell(column_family_id, - "os_build", - "12145.0.0-rc6", - timestamp) + rows = [ + table.direct_row("tablet#a0b81f74#20190501"), + table.direct_row("tablet#a0b81f74#20190502"), + ] + + rows[0].set_cell(column_family_id, "connected_wifi", 1, timestamp) + rows[0].set_cell(column_family_id, "os_build", "12155.0.0-rc1", timestamp) + rows[1].set_cell(column_family_id, "connected_wifi", 1, timestamp) + rows[1].set_cell(column_family_id, "os_build", "12145.0.0-rc6", timestamp) response = table.mutate_rows(rows) for i, status in enumerate(response): if status.code != 0: print("Error writing row: {}".format(status.message)) - print('Successfully wrote 2 rows.') + print("Successfully wrote 2 rows.") + + # [END bigtable_writes_batch] diff --git a/samples/snippets/writes/write_conditionally.py b/samples/snippets/writes/write_conditionally.py index 5f3d4d607..7fb640aad 100644 --- a/samples/snippets/writes/write_conditionally.py +++ b/samples/snippets/writes/write_conditionally.py @@ -30,15 +30,17 @@ def write_conditional(project_id, instance_id, table_id): row_key = "phone#4c410523#20190501" row_filter = row_filters.RowFilterChain( - filters=[row_filters.FamilyNameRegexFilter(column_family_id), - row_filters.ColumnQualifierRegexFilter('os_build'), - row_filters.ValueRegexFilter("PQ2A\\..*")]) + filters=[ + row_filters.FamilyNameRegexFilter(column_family_id), + row_filters.ColumnQualifierRegexFilter("os_build"), + row_filters.ValueRegexFilter("PQ2A\\..*"), + ] + ) row = table.conditional_row(row_key, filter_=row_filter) - row.set_cell(column_family_id, - "os_name", - "android", - timestamp) + row.set_cell(column_family_id, "os_name", "android", timestamp) row.commit() - print('Successfully updated row\'s os_name.') + print("Successfully updated row's os_name.") + + # [END bigtable_writes_conditional] diff --git a/samples/snippets/writes/write_increment.py b/samples/snippets/writes/write_increment.py index 73ce52c2f..ac8e2d16a 100644 --- a/samples/snippets/writes/write_increment.py +++ b/samples/snippets/writes/write_increment.py @@ -30,5 +30,7 @@ def write_increment(project_id, instance_id, 
table_id): row.increment_cell_value(column_family_id, "connected_wifi", -1) row.commit() - print('Successfully updated row {}.'.format(row_key)) + print("Successfully updated row {}.".format(row_key)) + + # [END bigtable_writes_increment] diff --git a/samples/snippets/writes/write_simple.py b/samples/snippets/writes/write_simple.py index b4222d234..1aa5a810f 100644 --- a/samples/snippets/writes/write_simple.py +++ b/samples/snippets/writes/write_simple.py @@ -30,20 +30,13 @@ def write_simple(project_id, instance_id, table_id): row_key = "phone#4c410523#20190501" row = table.direct_row(row_key) - row.set_cell(column_family_id, - "connected_cell", - 1, - timestamp) - row.set_cell(column_family_id, - "connected_wifi", - 1, - timestamp) - row.set_cell(column_family_id, - "os_build", - "PQ2A.190405.003", - timestamp) + row.set_cell(column_family_id, "connected_cell", 1, timestamp) + row.set_cell(column_family_id, "connected_wifi", 1, timestamp) + row.set_cell(column_family_id, "os_build", "PQ2A.190405.003", timestamp) row.commit() - print('Successfully wrote row {}.'.format(row_key)) + print("Successfully wrote row {}.".format(row_key)) + + # [END bigtable_writes_simple] diff --git a/samples/snippets/writes/writes_test.py b/samples/snippets/writes/writes_test.py index abe300095..77ae883d6 100644 --- a/samples/snippets/writes/writes_test.py +++ b/samples/snippets/writes/writes_test.py @@ -26,9 +26,9 @@ from .write_simple import write_simple -PROJECT = os.environ['GOOGLE_CLOUD_PROJECT'] -BIGTABLE_INSTANCE = os.environ['BIGTABLE_INSTANCE'] -TABLE_ID_PREFIX = 'mobile-time-series-{}' +PROJECT = os.environ["GOOGLE_CLOUD_PROJECT"] +BIGTABLE_INSTANCE = os.environ["BIGTABLE_INSTANCE"] +TABLE_ID_PREFIX = "mobile-time-series-{}" @pytest.fixture @@ -48,7 +48,7 @@ def table_id(bigtable_instance): if table.exists(): table.delete() - column_family_id = 'stats_summary' + column_family_id = "stats_summary" column_families = {column_family_id: None} table.create(column_families=column_families) @@ -67,7 +67,7 @@ def _write_simple(): _write_simple() out, _ = capsys.readouterr() - assert 'Successfully wrote row' in out + assert "Successfully wrote row" in out @backoff.on_exception(backoff.expo, DeadlineExceeded, max_time=60) def _write_increment(): @@ -75,7 +75,7 @@ def _write_increment(): _write_increment() out, _ = capsys.readouterr() - assert 'Successfully updated row' in out + assert "Successfully updated row" in out @backoff.on_exception(backoff.expo, DeadlineExceeded, max_time=60) def _write_conditional(): @@ -83,7 +83,7 @@ def _write_conditional(): _write_conditional() out, _ = capsys.readouterr() - assert 'Successfully updated row\'s os_name' in out + assert "Successfully updated row's os_name" in out @backoff.on_exception(backoff.expo, DeadlineExceeded, max_time=60) def _write_batch(): @@ -91,4 +91,4 @@ def _write_batch(): _write_batch() out, _ = capsys.readouterr() - assert 'Successfully wrote 2 rows' in out + assert "Successfully wrote 2 rows" in out diff --git a/samples/tableadmin/noxfile.py b/samples/tableadmin/noxfile.py index ba55d7ce5..93a9122cc 100644 --- a/samples/tableadmin/noxfile.py +++ b/samples/tableadmin/noxfile.py @@ -17,6 +17,7 @@ import os from pathlib import Path import sys +from typing import Callable, Dict, List, Optional import nox @@ -27,8 +28,9 @@ # WARNING - WARNING - WARNING - WARNING - WARNING # WARNING - WARNING - WARNING - WARNING - WARNING -# Copy `noxfile_config.py` to your directory and modify it instead. 
+BLACK_VERSION = "black==19.10b0" +# Copy `noxfile_config.py` to your directory and modify it instead. # `TEST_CONFIG` dict is a configuration hook that allows users to # modify the test configurations. The values here should be in sync @@ -37,24 +39,29 @@ TEST_CONFIG = { # You can opt out from the test for specific Python versions. - 'ignored_versions': ["2.7"], - + "ignored_versions": [], + # Old samples are opted out of enforcing Python type hints + # All new samples should feature them + "enforce_type_hints": False, # An envvar key for determining the project id to use. Change it # to 'BUILD_SPECIFIC_GCLOUD_PROJECT' if you want to opt in using a # build specific Cloud project. You can also use your own string # to use your own Cloud project. - 'gcloud_project_env': 'GOOGLE_CLOUD_PROJECT', + "gcloud_project_env": "GOOGLE_CLOUD_PROJECT", # 'gcloud_project_env': 'BUILD_SPECIFIC_GCLOUD_PROJECT', - + # If you need to use a specific version of pip, + # change pip_version_override to the string representation + # of the version number, for example, "20.2.4" + "pip_version_override": None, # A dictionary you want to inject into your test. Don't put any # secrets here. These values will override predefined values. - 'envs': {}, + "envs": {}, } try: # Ensure we can import noxfile_config in the project's directory. - sys.path.append('.') + sys.path.append(".") from noxfile_config import TEST_CONFIG_OVERRIDE except ImportError as e: print("No user noxfile_config found: detail: {}".format(e)) @@ -64,36 +71,43 @@ TEST_CONFIG.update(TEST_CONFIG_OVERRIDE) -def get_pytest_env_vars(): +def get_pytest_env_vars() -> Dict[str, str]: """Returns a dict for pytest invocation.""" ret = {} # Override the GCLOUD_PROJECT and the alias. - env_key = TEST_CONFIG['gcloud_project_env'] + env_key = TEST_CONFIG["gcloud_project_env"] # This should error out if not set. - ret['GOOGLE_CLOUD_PROJECT'] = os.environ[env_key] + ret["GOOGLE_CLOUD_PROJECT"] = os.environ[env_key] # Apply user supplied envs. - ret.update(TEST_CONFIG['envs']) + ret.update(TEST_CONFIG["envs"]) return ret # DO NOT EDIT - automatically generated. -# All versions used to tested samples. -ALL_VERSIONS = ["2.7", "3.6", "3.7", "3.8"] +# All versions used to test samples. +ALL_VERSIONS = ["3.6", "3.7", "3.8", "3.9", "3.10"] # Any default versions that should be ignored. -IGNORED_VERSIONS = TEST_CONFIG['ignored_versions'] +IGNORED_VERSIONS = TEST_CONFIG["ignored_versions"] TESTED_VERSIONS = sorted([v for v in ALL_VERSIONS if v not in IGNORED_VERSIONS]) -INSTALL_LIBRARY_FROM_SOURCE = bool(os.environ.get("INSTALL_LIBRARY_FROM_SOURCE", False)) +INSTALL_LIBRARY_FROM_SOURCE = os.environ.get("INSTALL_LIBRARY_FROM_SOURCE", False) in ( + "True", + "true", +) + +# Error if a python version is missing +nox.options.error_on_missing_interpreters = True + # # Style Checks # -def _determine_local_import_names(start_dir): +def _determine_local_import_names(start_dir: str) -> List[str]: """Determines all import names that should be considered "local". 
This is used when running the linter to ensure that import order is @@ -131,18 +145,34 @@ def _determine_local_import_names(start_dir): @nox.session -def lint(session): - session.install("flake8", "flake8-import-order") +def lint(session: nox.sessions.Session) -> None: + if not TEST_CONFIG["enforce_type_hints"]: + session.install("flake8", "flake8-import-order") + else: + session.install("flake8", "flake8-import-order", "flake8-annotations") local_names = _determine_local_import_names(".") args = FLAKE8_COMMON_ARGS + [ "--application-import-names", ",".join(local_names), - "." + ".", ] session.run("flake8", *args) +# +# Black +# + + +@nox.session +def blacken(session: nox.sessions.Session) -> None: + session.install(BLACK_VERSION) + python_files = [path for path in os.listdir(".") if path.endswith(".py")] + + session.run("black", *python_files) + + # # Sample Tests # @@ -151,13 +181,24 @@ def lint(session): PYTEST_COMMON_ARGS = ["--junitxml=sponge_log.xml"] -def _session_tests(session, post_install=None): +def _session_tests( + session: nox.sessions.Session, post_install: Callable = None +) -> None: """Runs py.test for a particular project.""" + if TEST_CONFIG["pip_version_override"]: + pip_version = TEST_CONFIG["pip_version_override"] + session.install(f"pip=={pip_version}") if os.path.exists("requirements.txt"): - session.install("-r", "requirements.txt") + if os.path.exists("constraints.txt"): + session.install("-r", "requirements.txt", "-c", "constraints.txt") + else: + session.install("-r", "requirements.txt") if os.path.exists("requirements-test.txt"): - session.install("-r", "requirements-test.txt") + if os.path.exists("constraints-test.txt"): + session.install("-r", "requirements-test.txt", "-c", "constraints-test.txt") + else: + session.install("-r", "requirements-test.txt") if INSTALL_LIBRARY_FROM_SOURCE: session.install("-e", _get_repo_root()) @@ -172,19 +213,19 @@ def _session_tests(session, post_install=None): # on travis where slow and flaky tests are excluded. # See http://doc.pytest.org/en/latest/_modules/_pytest/main.html success_codes=[0, 5], - env=get_pytest_env_vars() + env=get_pytest_env_vars(), ) @nox.session(python=ALL_VERSIONS) -def py(session): +def py(session: nox.sessions.Session) -> None: """Runs py.test for a sample using the specified version of Python.""" if session.python in TESTED_VERSIONS: _session_tests(session) else: - session.skip("SKIPPED: {} tests are disabled for this sample.".format( - session.python - )) + session.skip( + "SKIPPED: {} tests are disabled for this sample.".format(session.python) + ) # @@ -192,7 +233,7 @@ def py(session): # -def _get_repo_root(): +def _get_repo_root() -> Optional[str]: """ Returns the root folder of the project. """ # Get root of this repository. Assume we don't have directories nested deeper than 10 items.
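# Walk upward from the current working directory, returning the first ancestor that contains a root marker.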
p = Path(os.getcwd()) @@ -201,6 +242,11 @@ break if Path(p / ".git").exists(): return str(p) + # .git is not available in repos cloned via Cloud Build + # setup.py is always in the library's root, so use that instead + # https://github.com/googleapis/synthtool/issues/792 + if Path(p / "setup.py").exists(): + return str(p) p = p.parent raise Exception("Unable to detect repository root.") @@ -210,7 +256,7 @@ def _get_repo_root(): @nox.session @nox.parametrize("path", GENERATED_READMES) -def readmegen(session, path): +def readmegen(session: nox.sessions.Session, path: str) -> None: """(Re-)generates the readme for a sample.""" session.install("jinja2", "pyyaml") dir_ = os.path.dirname(path) diff --git a/samples/tableadmin/tableadmin.py b/samples/tableadmin/tableadmin.py index 29551a7f3..7c28601fb 100644 --- a/samples/tableadmin/tableadmin.py +++ b/samples/tableadmin/tableadmin.py @@ -38,7 +38,7 @@ def create_table(project_id, instance_id, table_id): - ''' Create a Bigtable table + """Create a Bigtable table :type project_id: str :param project_id: Project id of the client. @@ -48,7 +48,7 @@ def create_table(project_id, instance_id, table_id): :type table_id: str :param table_id: Table id of the table to create. - ''' + """ client = bigtable.Client(project=project_id, admin=True) instance = client.instance(instance_id) @@ -56,19 +56,19 @@ def create_table(project_id, instance_id, table_id): # Check whether table exists in an instance. # Create the table if it does not exist. - print('Checking if table {} exists...'.format(table_id)) + print("Checking if table {} exists...".format(table_id)) if table.exists(): - print('Table {} already exists.'.format(table_id)) + print("Table {} already exists.".format(table_id)) else: - print('Creating the {} table.'.format(table_id)) + print("Creating the {} table.".format(table_id)) table.create() - print('Created table {}.'.format(table_id)) + print("Created table {}.".format(table_id)) return client, instance, table def run_table_operations(project_id, instance_id, table_id): - ''' Create a Bigtable table and perform basic operations on it + """Create a Bigtable table and perform basic operations on it :type project_id: str :param project_id: Project id of the client. @@ -78,78 +78,84 @@ def run_table_operations(project_id, instance_id, table_id): :type table_id: str :param table_id: Table id of the table to create.
- ''' + """ client, instance, table = create_table(project_id, instance_id, table_id) # [START bigtable_list_tables] tables = instance.list_tables() - print('Listing tables in current project...') + print("Listing tables in current project...") if tables != []: for tbl in tables: print(tbl.table_id) else: - print('No table exists in current project...') + print("No table exists in current project...") # [END bigtable_list_tables] # [START bigtable_create_family_gc_max_age] - print('Creating column family cf1 with with MaxAge GC Rule...') + print("Creating column family cf1 with MaxAge GC Rule...") # Create a column family with GC policy : maximum age # where age = current time minus cell timestamp # Define the GC rule to retain data with max age of 5 days max_age_rule = column_family.MaxAgeGCRule(datetime.timedelta(days=5)) - column_family1 = table.column_family('cf1', max_age_rule) + column_family1 = table.column_family("cf1", max_age_rule) column_family1.create() - print('Created column family cf1 with MaxAge GC Rule.') + print("Created column family cf1 with MaxAge GC Rule.") # [END bigtable_create_family_gc_max_age] # [START bigtable_create_family_gc_max_versions] - print('Creating column family cf2 with max versions GC rule...') + print("Creating column family cf2 with max versions GC rule...") # Create a column family with GC policy : most recent N versions # where 1 = most recent version # Define the GC policy to retain only the most recent 2 versions max_versions_rule = column_family.MaxVersionsGCRule(2) - column_family2 = table.column_family('cf2', max_versions_rule) + column_family2 = table.column_family("cf2", max_versions_rule) column_family2.create() - print('Created column family cf2 with Max Versions GC Rule.') + print("Created column family cf2 with Max Versions GC Rule.") # [END bigtable_create_family_gc_max_versions] # [START bigtable_create_family_gc_union] - print('Creating column family cf3 with union GC rule...') + print("Creating column family cf3 with union GC rule...") # Create a column family with GC policy to drop data that matches # at least one condition.
# Define a GC rule to drop cells older than 5 days or not the # most recent version - union_rule = column_family.GCRuleUnion([ - column_family.MaxAgeGCRule(datetime.timedelta(days=5)), - column_family.MaxVersionsGCRule(2)]) - - column_family3 = table.column_family('cf3', union_rule) + union_rule = column_family.GCRuleUnion( + [ + column_family.MaxAgeGCRule(datetime.timedelta(days=5)), + column_family.MaxVersionsGCRule(2), + ] + ) + + column_family3 = table.column_family("cf3", union_rule) column_family3.create() - print('Created column family cf3 with Union GC rule') + print("Created column family cf3 with Union GC rule") # [END bigtable_create_family_gc_union] # [START bigtable_create_family_gc_intersection] - print('Creating column family cf4 with Intersection GC rule...') + print("Creating column family cf4 with Intersection GC rule...") # Create a column family with GC policy to drop data that matches # all conditions # GC rule: Drop cells older than 5 days AND older than the most # recent 2 versions - intersection_rule = column_family.GCRuleIntersection([ - column_family.MaxAgeGCRule(datetime.timedelta(days=5)), - column_family.MaxVersionsGCRule(2)]) - - column_family4 = table.column_family('cf4', intersection_rule) + intersection_rule = column_family.GCRuleIntersection( + [ + column_family.MaxAgeGCRule(datetime.timedelta(days=5)), + column_family.MaxVersionsGCRule(2), + ] + ) + + column_family4 = table.column_family("cf4", intersection_rule) column_family4.create() - print('Created column family cf4 with Intersection GC rule.') + print("Created column family cf4 with Intersection GC rule.") # [END bigtable_create_family_gc_intersection] # [START bigtable_create_family_gc_nested] - print('Creating column family cf5 with a Nested GC rule...') + print("Creating column family cf5 with a Nested GC rule...") # Create a column family with nested GC policies. 
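# Note: GCRuleUnion and GCRuleIntersection each accept a list of rules, including other unions and intersections, so GC policies can be nested to any depth.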
# Create a nested GC rule: # Drop cells that are either older than the 10 recent versions @@ -157,23 +163,26 @@ def run_table_operations(project_id, instance_id, table_id): # Drop cells that are older than a month AND older than the # 2 recent versions rule1 = column_family.MaxVersionsGCRule(10) - rule2 = column_family.GCRuleIntersection([ - column_family.MaxAgeGCRule(datetime.timedelta(days=30)), - column_family.MaxVersionsGCRule(2)]) + rule2 = column_family.GCRuleIntersection( + [ + column_family.MaxAgeGCRule(datetime.timedelta(days=30)), + column_family.MaxVersionsGCRule(2), + ] + ) nested_rule = column_family.GCRuleUnion([rule1, rule2]) - column_family5 = table.column_family('cf5', nested_rule) + column_family5 = table.column_family("cf5", nested_rule) column_family5.create() - print('Created column family cf5 with a Nested GC rule.') + print("Created column family cf5 with a Nested GC rule.") # [END bigtable_create_family_gc_nested] # [START bigtable_list_column_families] - print('Printing Column Family and GC Rule for all column families...') + print("Printing Column Family and GC Rule for all column families...") column_families = table.list_column_families() for column_family_name, gc_rule in sorted(column_families.items()): - print('Column Family:', column_family_name) - print('GC Rule:') + print("Column Family:", column_family_name) + print("GC Rule:") print(gc_rule.to_pb()) # Sample output: # Column Family: cf4 @@ -192,37 +201,37 @@ def run_table_operations(project_id, instance_id, table_id): # } # [END bigtable_list_column_families] - print('Print column family cf1 GC rule before update...') - print('Column Family: cf1') + print("Print column family cf1 GC rule before update...") + print("Column Family: cf1") print(column_family1.to_pb()) # [START bigtable_update_gc_rule] - print('Updating column family cf1 GC rule...') + print("Updating column family cf1 GC rule...") # Update the column family cf1 to update the GC rule - column_family1 = table.column_family( - 'cf1', - column_family.MaxVersionsGCRule(1)) + column_family1 = table.column_family("cf1", column_family.MaxVersionsGCRule(1)) column_family1.update() - print('Updated column family cf1 GC rule\n') + print("Updated column family cf1 GC rule\n") # [END bigtable_update_gc_rule] - print('Print column family cf1 GC rule after update...') - print('Column Family: cf1') + print("Print column family cf1 GC rule after update...") + print("Column Family: cf1") print(column_family1.to_pb()) # [START bigtable_delete_family] - print('Delete a column family cf2...') + print("Delete a column family cf2...") # Delete a column family column_family2.delete() - print('Column family cf2 deleted successfully.') + print("Column family cf2 deleted successfully.") # [END bigtable_delete_family] - print('execute command "python tableadmin.py delete [project_id] \ - [instance_id] --table [tableName]" to delete the table.') + print( 'execute command "python tableadmin.py delete [project_id] \ + [instance_id] --table [tableName]" to delete the table.' + ) def delete_table(project_id, instance_id, table_id): - ''' Delete bigtable. + """Delete a Bigtable table. :type project_id: str :param project_id: Project id of the client. @@ -232,7 +241,7 @@ def delete_table(project_id, instance_id, table_id): :type table_id: str :param table_id: Table id of the table to delete.
- ''' + """ client = bigtable.Client(project=project_id, admin=True) instance = client.instance(instance_id) @@ -241,43 +250,44 @@ # [START bigtable_delete_table] # Delete the entire table - print('Checking if table {} exists...'.format(table_id)) + print("Checking if table {} exists...".format(table_id)) if table.exists(): - print('Table {} exists.'.format(table_id)) - print('Deleting {} table.'.format(table_id)) + print("Table {} exists.".format(table_id)) + print("Deleting {} table.".format(table_id)) table.delete() - print('Deleted {} table.'.format(table_id)) + print("Deleted {} table.".format(table_id)) else: - print('Table {} does not exists.'.format(table_id)) + print("Table {} does not exist.".format(table_id)) # [END bigtable_delete_table] -if __name__ == '__main__': +if __name__ == "__main__": parser = argparse.ArgumentParser( - description=__doc__, - formatter_class=argparse.ArgumentDefaultsHelpFormatter) + description=__doc__, formatter_class=argparse.ArgumentDefaultsHelpFormatter + ) - parser.add_argument('command', - help='run or delete. \ - Operation to perform on table.') parser.add_argument( - '--table', - help='Cloud Bigtable Table name.', - default='Hello-Bigtable') + "command", + help="run or delete. \ + Operation to perform on table.", + ) + parser.add_argument( + "--table", help="Cloud Bigtable Table name.", default="Hello-Bigtable" + ) - parser.add_argument('project_id', - help='Your Cloud Platform project ID.') + parser.add_argument("project_id", help="Your Cloud Platform project ID.") parser.add_argument( - 'instance_id', - help='ID of the Cloud Bigtable instance to connect to.') + "instance_id", help="ID of the Cloud Bigtable instance to connect to." ) args = parser.parse_args() - if args.command.lower() == 'run': - run_table_operations(args.project_id, args.instance_id, - args.table) - elif args.command.lower() == 'delete': + if args.command.lower() == "run": + run_table_operations(args.project_id, args.instance_id, args.table) + elif args.command.lower() == "delete": delete_table(args.project_id, args.instance_id, args.table) else: - print('Command should be either run or delete.\n Use argument -h,\ --help to show help and exit.') + print( "Command should be either run or delete.\n Use argument -h,\ --help to show help and exit." ) diff --git a/samples/tableadmin/tableadmin_test.py b/samples/tableadmin/tableadmin_test.py index b001ce076..3063eee9f 100755 --- a/samples/tableadmin/tableadmin_test.py +++ b/samples/tableadmin/tableadmin_test.py @@ -23,9 +23,9 @@ from tableadmin import delete_table from tableadmin import run_table_operations -PROJECT = os.environ['GOOGLE_CLOUD_PROJECT'] -BIGTABLE_INSTANCE = os.environ['BIGTABLE_INSTANCE'] -TABLE_ID_FORMAT = 'tableadmin-test-{}' +PROJECT = os.environ["GOOGLE_CLOUD_PROJECT"] +BIGTABLE_INSTANCE = os.environ["BIGTABLE_INSTANCE"] +TABLE_ID_FORMAT = "tableadmin-test-{}" retry_429_503 = RetryErrors(exceptions.TooManyRequests, exceptions.ServiceUnavailable) @@ -36,22 +36,22 @@ def test_run_table_operations(capsys): retry_429_503(run_table_operations)(PROJECT, BIGTABLE_INSTANCE, table_id) out, _ = capsys.readouterr() - assert 'Creating the ' + table_id + ' table.' in out - assert 'Listing tables in current project.' in out - assert 'Creating column family cf1 with with MaxAge GC Rule' in out - assert 'Created column family cf1 with MaxAge GC Rule.' in out - assert 'Created column family cf2 with Max Versions GC Rule.'
in out - assert 'Created column family cf3 with Union GC rule' in out - assert 'Created column family cf4 with Intersection GC rule.' in out - assert 'Created column family cf5 with a Nested GC rule.' in out - assert 'Printing Column Family and GC Rule for all column families.' in out - assert 'Updating column family cf1 GC rule...' in out - assert 'Updated column family cf1 GC rule' in out - assert 'Print column family cf1 GC rule after update...' in out - assert 'Column Family: cf1' in out - assert 'max_num_versions: 1' in out - assert 'Delete a column family cf2...' in out - assert 'Column family cf2 deleted successfully.' in out + assert "Creating the " + table_id + " table." in out + assert "Listing tables in current project." in out + assert "Creating column family cf1 with MaxAge GC Rule" in out + assert "Created column family cf1 with MaxAge GC Rule." in out + assert "Created column family cf2 with Max Versions GC Rule." in out + assert "Created column family cf3 with Union GC rule" in out + assert "Created column family cf4 with Intersection GC rule." in out + assert "Created column family cf5 with a Nested GC rule." in out + assert "Printing Column Family and GC Rule for all column families." in out + assert "Updating column family cf1 GC rule..." in out + assert "Updated column family cf1 GC rule" in out + assert "Print column family cf1 GC rule after update..." in out + assert "Column Family: cf1" in out + assert "max_num_versions: 1" in out + assert "Delete a column family cf2..." in out + assert "Column family cf2 deleted successfully." in out retry_429_503(delete_table)(PROJECT, BIGTABLE_INSTANCE, table_id) @@ -63,6 +63,6 @@ def test_delete_table(capsys): retry_429_503(delete_table)(PROJECT, BIGTABLE_INSTANCE, table_id) out, _ = capsys.readouterr() - assert 'Table ' + table_id + ' exists.' in out - assert 'Deleting ' + table_id + ' table.' in out - assert 'Deleted ' + table_id + ' table.' in out + assert "Table " + table_id + " exists." in out + assert "Deleting " + table_id + " table." in out + assert "Deleted " + table_id + " table." in out
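Why every refreshed noxfile replaces bool(os.environ.get("INSTALL_LIBRARY_FROM_SOURCE", False)): bool() of any non-empty string is True, so a CI job exporting INSTALL_LIBRARY_FROM_SOURCE=False still installed the library from source. The new membership test opts in only on the literal strings "True" and "true". A minimal standalone sketch of the difference (illustrative, not part of the patch):

----------------------------- %< -----------------------------
import os

# Simulate a CI job that explicitly opts out of source installs.
os.environ["INSTALL_LIBRARY_FROM_SOURCE"] = "False"

# Old template: bool() of any non-empty string is True.
old_flag = bool(os.environ.get("INSTALL_LIBRARY_FROM_SOURCE", False))

# New template: only the literal strings "True" / "true" opt in.
new_flag = os.environ.get("INSTALL_LIBRARY_FROM_SOURCE", False) in ("True", "true")

print(old_flag, new_flag)  # prints: True False
----------------------------- %< -----------------------------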
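Likewise, read_prefix in samples/snippets/reads/read_snippets.py derives the exclusive end key for a prefix scan by incrementing the prefix's last character. A short sketch of why that works for ASCII prefixes (the "phone" value below is illustrative; the sample defines its own prefix above the hunk shown):

----------------------------- %< -----------------------------
prefix = "phone"  # illustrative value, not taken from the sample

# Increment the final character: every key starting with "phone"
# sorts before "phonf", so the half-open range [prefix, end_key)
# covers exactly the keys with that prefix (assuming the last
# character is ASCII and not the maximum value).
end_key = prefix[:-1] + chr(ord(prefix[-1]) + 1)
assert end_key == "phonf"
----------------------------- %< -----------------------------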