diff --git a/README.rst b/README.rst
index cdb6ce31..c0d4feeb 100644
--- a/README.rst
+++ b/README.rst
@@ -1,7 +1,7 @@
 Python Client for BigQuery Data Transfer API
 ============================================
 
-|GA| |pypi| |versions| 
+|GA| |pypi| |versions|
 
 The `BigQuery Data Transfer API`_ allows users to transfer data from partner
 SaaS applications to Google BigQuery on a scheduled, managed basis.
@@ -79,32 +79,6 @@ Windows
     <your-env>\Scripts\activate
     <your-env>\Scripts\pip.exe install google-cloud-bigquery-datatransfer
 
-Example Usage
-~~~~~~~~~~~~~
-
-DataTransferServiceClient
-^^^^^^^^^^^^^^^^^^^^^^^^^
-
-.. code:: py
-
-    from google.cloud import bigquery_datatransfer_v1
-
-    client = bigquery_datatransfer_v1.DataTransferServiceClient()
-
-    parent = client.location_path('[PROJECT]', '[LOCATION]')
-
-
-    # Iterate over all results
-    for element in client.list_data_sources(parent):
-        # process element
-        pass
-
-    # Or iterate over results one page at a time
-    for page in client.list_data_sources(parent).pages:
-        for element in page:
-            # process element
-            pass
-
 Next Steps
 ~~~~~~~~~~
 
diff --git a/samples/snippets/conftest.py b/samples/snippets/conftest.py
new file mode 100644
index 00000000..44a8fb49
--- /dev/null
+++ b/samples/snippets/conftest.py
@@ -0,0 +1,53 @@
+# Copyright 2020 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import google.api_core.exceptions
+import google.auth
+from google.cloud import bigquery
+from google.cloud import bigquery_datatransfer
+import pytest
+
+
+@pytest.fixture(scope="session")
+def default_credentials():
+    return google.auth.default(["https://www.googleapis.com/auth/cloud-platform"])
+
+
+@pytest.fixture(scope="session")
+def project_id(default_credentials):
+    _, project_id = default_credentials
+    return project_id
+
+
+@pytest.fixture(scope="session")
+def bigquery_client(default_credentials):
+    credentials, project_id = default_credentials
+    return bigquery.Client(credentials=credentials, project=project_id)
+
+
+@pytest.fixture(scope="session")
+def transfer_client(default_credentials):
+    credentials, _ = default_credentials
+    return bigquery_datatransfer.DataTransferServiceClient(credentials=credentials)
+
+
+@pytest.fixture
+def to_delete_configs(transfer_client):
+    to_delete = []
+    yield to_delete
+    for config_name in to_delete:
+        try:
+            transfer_client.delete_transfer_config(name=config_name)
+        except google.api_core.exceptions.GoogleAPICallError:
+            pass
diff --git a/samples/snippets/copy_dataset_test.py b/samples/snippets/copy_dataset_test.py
index 6ff70237..00a5e560 100644
--- a/samples/snippets/copy_dataset_test.py
+++ b/samples/snippets/copy_dataset_test.py
@@ -15,10 +15,6 @@
 import datetime
 import uuid
 
-import google.api_core.exceptions
-import google.auth
-from google.cloud import bigquery
-from google.cloud import bigquery_datatransfer
 import pytest
 
 from . import copy_dataset
@@ -29,40 +25,6 @@ def temp_suffix():
     return f"{now.strftime('%Y%m%d%H%M%S')}_{uuid.uuid4().hex[:8]}"
 
 
-@pytest.fixture(scope="session")
-def default_credentials():
-    return google.auth.default(["https://www.googleapis.com/auth/cloud-platform"])
-
-
-@pytest.fixture(scope="session")
-def project_id(default_credentials):
-    _, project_id = default_credentials
-    return project_id
-
-
-@pytest.fixture(scope="session")
-def bigquery_client(default_credentials):
-    credentials, project_id = default_credentials
-    return bigquery.Client(credentials=credentials, project=project_id)
-
-
-@pytest.fixture(scope="session")
-def transfer_client(default_credentials):
-    credentials, _ = default_credentials
-    return bigquery_datatransfer.DataTransferServiceClient(credentials=credentials)
-
-
-@pytest.fixture
-def to_delete_configs(transfer_client):
-    to_delete = []
-    yield to_delete
-    for config_name in to_delete:
-        try:
-            transfer_client.delete_transfer_config(name=config_name)
-        except google.api_core.exceptions.GoogleAPICallError:
-            pass
-
-
 @pytest.fixture(scope="module")
 def destination_dataset_id(bigquery_client, project_id):
     dataset_id = f"bqdts_dest_{temp_suffix()}"
diff --git a/samples/snippets/quickstart.py b/samples/snippets/quickstart.py
index 042a6459..de8d05e5 100644
--- a/samples/snippets/quickstart.py
+++ b/samples/snippets/quickstart.py
@@ -14,29 +14,36 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
+import sys
 
-def run_quickstart(project="my-project"):
+
+def run_quickstart(override_values={}):
     # [START bigquerydatatransfer_quickstart]
     from google.cloud import bigquery_datatransfer
 
     client = bigquery_datatransfer.DataTransferServiceClient()
 
     # TODO: Update to your project ID.
-    # project = "my-project"
+    project_id = "my-project"
+    # [END bigquerydatatransfer_quickstart]
+    # To facilitate testing, we replace values with alternatives
+    # provided by the testing harness.
+    project_id = override_values.get("project_id", project_id)
+    # [START bigquerydatatransfer_quickstart]
 
     # Get the full path to your project.
-    parent = f"projects/{project}"
+    parent = client.common_project_path(project_id)
 
-    print('Supported Data Sources:')
+    print("Supported Data Sources:")
 
     # Iterate over all possible data sources.
     for data_source in client.list_data_sources(parent=parent):
-        print('{}:'.format(data_source.display_name))
-        print('\tID: {}'.format(data_source.data_source_id))
-        print('\tFull path: {}'.format(data_source.name))
-        print('\tDescription: {}'.format(data_source.description))
+        print("{}:".format(data_source.display_name))
+        print("\tID: {}".format(data_source.data_source_id))
+        print("\tFull path: {}".format(data_source.name))
+        print("\tDescription: {}".format(data_source.description))
     # [END bigquerydatatransfer_quickstart]
 
 
-if __name__ == '__main__':
-    run_quickstart()
+if __name__ == "__main__":
+    run_quickstart(override_values={"project_id": sys.argv[1]})
diff --git a/samples/snippets/quickstart_test.py b/samples/snippets/quickstart_test.py
index 5b873c63..570d5181 100644
--- a/samples/snippets/quickstart_test.py
+++ b/samples/snippets/quickstart_test.py
@@ -12,24 +12,10 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
-import os
-
-import pytest
-
 from . import quickstart
 
 
-PROJECT = os.environ["GOOGLE_CLOUD_PROJECT"]
-
-
-@pytest.fixture
-def mock_project_id():
-    """Mock out project and replace with project from environment."""
-
-    return PROJECT
-
-
-def test_quickstart(capsys, mock_project_id):
-    quickstart.run_quickstart(mock_project_id)
+def test_quickstart(capsys, project_id):
+    quickstart.run_quickstart(override_values={"project_id": project_id})
     out, _ = capsys.readouterr()
     assert "Supported Data Sources:" in out
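
Usage sketch (not taken from the diff): with this change, run_quickstart reads the
project ID from the override_values dict, and the __main__ block forwards sys.argv[1],
so the sample can be run as "python quickstart.py <project-id>" from samples/snippets.
A minimal way to drive the new entry point from Python, assuming quickstart.py is on
the import path and using "your-project-id" purely as a placeholder:

    # Minimal sketch: exercising the reworked quickstart entry point.
    # "your-project-id" is a placeholder, not a value from this diff.
    import quickstart

    # override_values is read back inside run_quickstart before it builds the
    # parent resource path with client.common_project_path(project_id).
    quickstart.run_quickstart(override_values={"project_id": "your-project-id"})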