diff --git a/samples/__init__.py b/samples/__init__.py deleted file mode 100644 index e69de29b..00000000 diff --git a/samples/create_scheduled_query.py b/samples/create_scheduled_query.py deleted file mode 100644 index 297e1f73..00000000 --- a/samples/create_scheduled_query.py +++ /dev/null @@ -1,97 +0,0 @@ -# -*- coding: utf-8 -*- -# -# Copyright 2019 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# https://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -# To install the latest published package dependency, execute the following: -# pip install google-cloud-bigquery-datatransfer - - -def sample_create_transfer_config(project_id, dataset_id, authorization_code=""): - # [START bigquerydatatransfer_create_scheduled_query] - from google.cloud import bigquery_datatransfer - - client = bigquery_datatransfer.DataTransferServiceClient() - - # TODO(developer): Set the project_id to the project that contains the - # destination dataset. - # project_id = "your-project-id" - - # TODO(developer): Set the destination dataset. The authorized user must - # have owner permissions on the dataset. - # dataset_id = "your_dataset_id" - - # TODO(developer): The first time you run this sample, set the - # authorization code to a value from the URL: - # https://www.gstatic.com/bigquerydatatransfer/oauthz/auth?client_id=433065040935-hav5fqnc9p9cht3rqneus9115ias2kn1.apps.googleusercontent.com&scope=https://www.googleapis.com/auth/bigquery%20https://www.googleapis.com/auth/drive&redirect_uri=urn:ietf:wg:oauth:2.0:oob - # - # authorization_code = "_4/ABCD-EFGHIJKLMNOP-QRSTUVWXYZ" - # - # You can use an empty string for authorization_code in subsequent runs of - # this code sample with the same credentials. - # - # authorization_code = "" - - # Use standard SQL syntax for the query. - query_string = """ - SELECT - CURRENT_TIMESTAMP() as current_time, - @run_time as intended_run_time, - @run_date as intended_run_date, - 17 as some_integer - """ - - parent = f"projects/{project_id}" - - transfer_config = bigquery_datatransfer.TransferConfig( - destination_dataset_id=dataset_id, - display_name="Your Scheduled Query Name", - data_source_id="scheduled_query", - params={ - "query": query_string, - "destination_table_name_template": "your_table_{run_date}", - "write_disposition": "WRITE_TRUNCATE", - "partitioning_field": "", - }, - schedule="every 24 hours", - ) - - response = client.create_transfer_config( - request={ - "parent": parent, - "transfer_config": transfer_config, - "authorization_code": authorization_code, - } - ) - - print("Created scheduled query '{}'".format(response.name)) - # [END bigquerydatatransfer_create_scheduled_query] - # Return the config name for testing purposes, so that it can be deleted. 
- return response.name - - -def main(): - import argparse - - parser = argparse.ArgumentParser() - parser.add_argument("--project_id", type=str, default="your-project-id") - parser.add_argument("--dataset_id", type=str, default="your_dataset_id") - parser.add_argument("--authorization_code", type=str, default="") - args = parser.parse_args() - - sample_create_transfer_config(args.project_id, args.dataset_id, args.authorization_code) - - -if __name__ == "__main__": - main() diff --git a/samples/noxfile.py b/samples/noxfile.py deleted file mode 100644 index bca0522e..00000000 --- a/samples/noxfile.py +++ /dev/null @@ -1,247 +0,0 @@ -# Copyright 2019 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -from __future__ import print_function - -import os -from pathlib import Path -import sys -from typing import Callable, Dict, List, Optional - -import nox - - -# WARNING - WARNING - WARNING - WARNING - WARNING -# WARNING - WARNING - WARNING - WARNING - WARNING -# DO NOT EDIT THIS FILE EVER! -# WARNING - WARNING - WARNING - WARNING - WARNING -# WARNING - WARNING - WARNING - WARNING - WARNING - -# Copy `noxfile_config.py` to your directory and modify it instead. - - -# `TEST_CONFIG` dict is a configuration hook that allows users to -# modify the test configurations. The values here should be in sync -# with `noxfile_config.py`. Users will copy `noxfile_config.py` into -# their directory and modify it. - -TEST_CONFIG = { - # You can opt out from the test for specific Python versions. - 'ignored_versions': ["2.7"], - - # Old samples are opted out of enforcing Python type hints - # All new samples should feature them - 'enforce_type_hints': False, - - # An envvar key for determining the project id to use. Change it - # to 'BUILD_SPECIFIC_GCLOUD_PROJECT' if you want to opt in using a - # build specific Cloud project. You can also use your own string - # to use your own Cloud project. - 'gcloud_project_env': 'GOOGLE_CLOUD_PROJECT', - # 'gcloud_project_env': 'BUILD_SPECIFIC_GCLOUD_PROJECT', - - # A dictionary you want to inject into your test. Don't put any - # secrets here. These values will override predefined values. - 'envs': {}, -} - - -try: - # Ensure we can import noxfile_config in the project's directory. - sys.path.append('.') - from noxfile_config import TEST_CONFIG_OVERRIDE -except ImportError as e: - print("No user noxfile_config found: detail: {}".format(e)) - TEST_CONFIG_OVERRIDE = {} - -# Update the TEST_CONFIG with the user supplied values. -TEST_CONFIG.update(TEST_CONFIG_OVERRIDE) - - -def get_pytest_env_vars() -> Dict[str, str]: - """Returns a dict for pytest invocation.""" - ret = {} - - # Override the GCLOUD_PROJECT and the alias. - env_key = TEST_CONFIG['gcloud_project_env'] - # This should error out if not set. - ret['GOOGLE_CLOUD_PROJECT'] = os.environ[env_key] - - # Apply user supplied envs. - ret.update(TEST_CONFIG['envs']) - return ret - - -# DO NOT EDIT - automatically generated. -# All versions used to tested samples. 
-ALL_VERSIONS = ["2.7", "3.6", "3.7", "3.8"] - -# Any default versions that should be ignored. -IGNORED_VERSIONS = TEST_CONFIG['ignored_versions'] - -TESTED_VERSIONS = sorted([v for v in ALL_VERSIONS if v not in IGNORED_VERSIONS]) - -INSTALL_LIBRARY_FROM_SOURCE = bool(os.environ.get("INSTALL_LIBRARY_FROM_SOURCE", False)) -# -# Style Checks -# - - -def _determine_local_import_names(start_dir: str) -> List[str]: - """Determines all import names that should be considered "local". - - This is used when running the linter to insure that import order is - properly checked. - """ - file_ext_pairs = [os.path.splitext(path) for path in os.listdir(start_dir)] - return [ - basename - for basename, extension in file_ext_pairs - if extension == ".py" - or os.path.isdir(os.path.join(start_dir, basename)) - and basename not in ("__pycache__") - ] - - -# Linting with flake8. -# -# We ignore the following rules: -# E203: whitespace before ‘:’ -# E266: too many leading ‘#’ for block comment -# E501: line too long -# I202: Additional newline in a section of imports -# -# We also need to specify the rules which are ignored by default: -# ['E226', 'W504', 'E126', 'E123', 'W503', 'E24', 'E704', 'E121'] -FLAKE8_COMMON_ARGS = [ - "--show-source", - "--builtin=gettext", - "--max-complexity=20", - "--import-order-style=google", - "--exclude=.nox,.cache,env,lib,generated_pb2,*_pb2.py,*_pb2_grpc.py", - "--ignore=E121,E123,E126,E203,E226,E24,E266,E501,E704,W503,W504,I202", - "--max-line-length=88", -] - - -@nox.session -def lint(session: nox.sessions.Session) -> None: - if not TEST_CONFIG['enforce_type_hints']: - session.install("flake8", "flake8-import-order") - else: - session.install("flake8", "flake8-import-order", "flake8-annotations") - - local_names = _determine_local_import_names(".") - args = FLAKE8_COMMON_ARGS + [ - "--application-import-names", - ",".join(local_names), - "." - ] - session.run("flake8", *args) -# -# Black -# - - -@nox.session -def blacken(session: nox.sessions.Session) -> None: - session.install("black") - python_files = [path for path in os.listdir(".") if path.endswith(".py")] - - session.run("black", *python_files) - -# -# Sample Tests -# - - -PYTEST_COMMON_ARGS = ["--junitxml=sponge_log.xml"] - - -def _session_tests(session: nox.sessions.Session, post_install: Callable = None) -> None: - """Runs py.test for a particular project.""" - if os.path.exists("requirements.txt"): - session.install("-r", "requirements.txt") - - if os.path.exists("requirements-test.txt"): - session.install("-r", "requirements-test.txt") - - if INSTALL_LIBRARY_FROM_SOURCE: - session.install("-e", _get_repo_root()) - - if post_install: - post_install(session) - - session.run( - "pytest", - *(PYTEST_COMMON_ARGS + session.posargs), - # Pytest will return 5 when no tests are collected. This can happen - # on travis where slow and flaky tests are excluded. - # See http://doc.pytest.org/en/latest/_modules/_pytest/main.html - success_codes=[0, 5], - env=get_pytest_env_vars() - ) - - -@nox.session(python=ALL_VERSIONS) -def py(session: nox.sessions.Session) -> None: - """Runs py.test for a sample using the specified version of Python.""" - if session.python in TESTED_VERSIONS: - _session_tests(session) - else: - session.skip("SKIPPED: {} tests are disabled for this sample.".format( - session.python - )) - - -# -# Readmegen -# - - -def _get_repo_root() -> Optional[str]: - """ Returns the root folder of the project. """ - # Get root of this repository. Assume we don't have directories nested deeper than 10 items. 
- p = Path(os.getcwd()) - for i in range(10): - if p is None: - break - if Path(p / ".git").exists(): - return str(p) - # .git is not available in repos cloned via Cloud Build - # setup.py is always in the library's root, so use that instead - # https://github.com/googleapis/synthtool/issues/792 - if Path(p / "setup.py").exists(): - return str(p) - p = p.parent - raise Exception("Unable to detect repository root.") - - -GENERATED_READMES = sorted([x for x in Path(".").rglob("*.rst.in")]) - - -@nox.session -@nox.parametrize("path", GENERATED_READMES) -def readmegen(session: nox.sessions.Session, path: str) -> None: - """(Re-)generates the readme for a sample.""" - session.install("jinja2", "pyyaml") - dir_ = os.path.dirname(path) - - if os.path.exists(os.path.join(dir_, "requirements.txt")): - session.install("-r", os.path.join(dir_, "requirements.txt")) - - in_file = os.path.join(dir_, "README.rst.in") - session.run( - "python", _get_repo_root() + "/scripts/readme-gen/readme_gen.py", in_file - ) diff --git a/samples/requirements-test.txt b/samples/requirements-test.txt deleted file mode 100644 index cadf5ccd..00000000 --- a/samples/requirements-test.txt +++ /dev/null @@ -1,4 +0,0 @@ -pytest==6.0.1 -mock==4.0.2 - - diff --git a/samples/requirements.txt b/samples/requirements.txt deleted file mode 100644 index e3f405f0..00000000 --- a/samples/requirements.txt +++ /dev/null @@ -1,2 +0,0 @@ -google-cloud-bigquery-datatransfer==3.0.0 -google-cloud-bigquery diff --git a/samples/update_transfer_config.py b/samples/update_transfer_config.py deleted file mode 100644 index 3e6ed1e8..00000000 --- a/samples/update_transfer_config.py +++ /dev/null @@ -1,56 +0,0 @@ -# -*- coding: utf-8 -*- -# -# Copyright 2020 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# https://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -# To install the latest published package dependency, execute the following: -# pip install google-cloud-bigquery-datatransfer - - -def sample_update_transfer_config(config_name, display_name): - # [START bigquerydatatransfer_update_config] - from google.cloud import bigquery_datatransfer - - client = bigquery_datatransfer.DataTransferServiceClient() - # TODO(developer): Set the config_name of the transfer config to update. - # config_name = "your-created-transfer-config-name" - - # TODO(developer): Set the new display_name for the transfer config. - # display_name = "Your Updated Display Name" - - transfer_config = client.get_transfer_config(name=config_name) - transfer_config.display_name = display_name - field_mask = {"paths": ["display_name"]} - response = client.update_transfer_config( - transfer_config=transfer_config, update_mask=field_mask - ) - - print("Transfer config updated for '{}'".format(response.name)) - # [END bigquerydatatransfer_update_config] - # Return the transfer config for testing purposes, so that it can be deleted.
- return response - - -def main(): - import argparse - - parser = argparse.ArgumentParser() - parser.add_argument("--transfer_config_name", type=str, default="your-created-transfer-config-name") - parser.add_argument("--display_name", type=str, default="Your Updated Display Name") - args = parser.parse_args() - - sample_update_transfer_config(args.transfer_config_name, args.display_name) - - -if __name__ == "__main__": - main()
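
Both deleted samples return their transfer config "for testing purposes, so that it can be deleted." A minimal sketch of that cleanup, assuming the snippets are run from the samples/ directory and that real project and dataset values replace the placeholders below; delete_transfer_config is the standard DataTransferServiceClient method used here to remove the scheduled query afterwards:

from google.cloud import bigquery_datatransfer

from create_scheduled_query import sample_create_transfer_config
from update_transfer_config import sample_update_transfer_config

# Placeholders: substitute a real project and an existing destination dataset.
config_name = sample_create_transfer_config("your-project-id", "your_dataset_id")

# Rename the scheduled query created above.
sample_update_transfer_config(config_name, "Your Updated Display Name")

# Clean up so the test run leaves no scheduled query behind.
client = bigquery_datatransfer.DataTransferServiceClient()
client.delete_transfer_config(name=config_name)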