This repository has been archived by the owner on Nov 29, 2023. It is now read-only.

docs: remove out-of-date sample from README (#80)
See samples/ directory for maintained and tested samples
tswast committed Dec 10, 2020
1 parent aa216bf commit af0406e
Showing 5 changed files with 73 additions and 91 deletions.
28 changes: 1 addition & 27 deletions README.rst
@@ -1,7 +1,7 @@
Python Client for BigQuery Data Transfer API
============================================

|GA| |pypi| |versions|

The `BigQuery Data Transfer API`_ allows users to transfer data from partner
SaaS applications to Google BigQuery on a scheduled, managed basis.
@@ -79,32 +79,6 @@ Windows
    <your-env>\Scripts\activate
    <your-env>\Scripts\pip.exe install google-cloud-bigquery-datatransfer

Example Usage
~~~~~~~~~~~~~

DataTransferServiceClient
^^^^^^^^^^^^^^^^^^^^^^^^^

.. code:: py

    from google.cloud import bigquery_datatransfer_v1

    client = bigquery_datatransfer_v1.DataTransferServiceClient()

    parent = client.location_path('[PROJECT]', '[LOCATION]')

    # Iterate over all results
    for element in client.list_data_sources(parent):
        # process element
        pass

    # Or iterate over results one page at a time
    for page in client.list_data_sources(parent).pages:
        for element in page:
            # process element
            pass
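
The example removed above relies on the pre-2.0 location_path helper. As a rough sketch only, a comparable listing against the current client, mirroring the updated quickstart.py later in this commit, might look like the following ("your-project-id" is a placeholder):

.. code:: py

    from google.cloud import bigquery_datatransfer

    client = bigquery_datatransfer.DataTransferServiceClient()

    # Placeholder project ID; substitute a project you can query.
    parent = client.common_project_path("your-project-id")

    # List every data source the Data Transfer API exposes for this project.
    for data_source in client.list_data_sources(parent=parent):
        print(data_source.display_name)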
Next Steps
~~~~~~~~~~

53 changes: 53 additions & 0 deletions samples/snippets/conftest.py
@@ -0,0 +1,53 @@
# Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

import google.api_core.exceptions
import google.auth
from google.cloud import bigquery
from google.cloud import bigquery_datatransfer
import pytest


@pytest.fixture(scope="session")
def default_credentials():
    return google.auth.default(["https://www.googleapis.com/auth/cloud-platform"])


@pytest.fixture(scope="session")
def project_id(default_credentials):
    _, project_id = default_credentials
    return project_id


@pytest.fixture(scope="session")
def bigquery_client(default_credentials):
    credentials, project_id = default_credentials
    return bigquery.Client(credentials=credentials, project=project_id)


@pytest.fixture(scope="session")
def transfer_client(default_credentials):
    credentials, _ = default_credentials
    return bigquery_datatransfer.DataTransferServiceClient(credentials=credentials)


@pytest.fixture
def to_delete_configs(transfer_client):
    to_delete = []
    yield to_delete
    for config_name in to_delete:
        try:
            transfer_client.delete_transfer_config(name=config_name)
        except google.api_core.exceptions.GoogleAPICallError:
            pass
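
A minimal sketch of how a test module might compose these shared fixtures; the test name, dataset, query, and display name below are hypothetical and not part of this commit:

# Hypothetical test module illustrating the conftest.py fixtures above.
from google.cloud import bigquery_datatransfer


def test_create_scheduled_query(transfer_client, project_id, to_delete_configs):
    parent = transfer_client.common_project_path(project_id)

    # All values below are placeholders; the destination dataset must
    # already exist in your project for the API call to succeed.
    transfer_config = bigquery_datatransfer.TransferConfig(
        destination_dataset_id="your_dataset",
        display_name="conftest fixture demo",
        data_source_id="scheduled_query",
        params={
            "query": "SELECT CURRENT_TIMESTAMP() AS t",
            "destination_table_name_template": "demo_{run_date}",
            "write_disposition": "WRITE_TRUNCATE",
        },
        schedule="every 24 hours",
    )
    transfer_config = transfer_client.create_transfer_config(
        parent=parent, transfer_config=transfer_config
    )

    # Register the new config so the to_delete_configs fixture deletes it
    # after the test, even if a later assertion fails.
    to_delete_configs.append(transfer_config.name)

    assert transfer_config.data_source_id == "scheduled_query"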
38 changes: 0 additions & 38 deletions samples/snippets/copy_dataset_test.py
@@ -15,10 +15,6 @@
import datetime
import uuid

import google.api_core.exceptions
import google.auth
from google.cloud import bigquery
from google.cloud import bigquery_datatransfer
import pytest

from . import copy_dataset
@@ -29,40 +25,6 @@ def temp_suffix():
return f"{now.strftime('%Y%m%d%H%M%S')}_{uuid.uuid4().hex[:8]}"


@pytest.fixture(scope="session")
def default_credentials():
    return google.auth.default(["https://www.googleapis.com/auth/cloud-platform"])


@pytest.fixture(scope="session")
def project_id(default_credentials):
    _, project_id = default_credentials
    return project_id


@pytest.fixture(scope="session")
def bigquery_client(default_credentials):
    credentials, project_id = default_credentials
    return bigquery.Client(credentials=credentials, project=project_id)


@pytest.fixture(scope="session")
def transfer_client(default_credentials):
    credentials, _ = default_credentials
    return bigquery_datatransfer.DataTransferServiceClient(credentials=credentials)


@pytest.fixture
def to_delete_configs(transfer_client):
    to_delete = []
    yield to_delete
    for config_name in to_delete:
        try:
            transfer_client.delete_transfer_config(name=config_name)
        except google.api_core.exceptions.GoogleAPICallError:
            pass


@pytest.fixture(scope="module")
def destination_dataset_id(bigquery_client, project_id):
    dataset_id = f"bqdts_dest_{temp_suffix()}"
27 changes: 17 additions & 10 deletions samples/snippets/quickstart.py
@@ -14,29 +14,36 @@
# See the License for the specific language governing permissions and
# limitations under the License.

import sys

def run_quickstart(project="my-project"):

def run_quickstart(override_values={}):
    # [START bigquerydatatransfer_quickstart]
    from google.cloud import bigquery_datatransfer

    client = bigquery_datatransfer.DataTransferServiceClient()

    # TODO: Update to your project ID.
    # project = "my-project"
    project_id = "my-project"
    # [END bigquerydatatransfer_quickstart]
    # To facilitate testing, we replace values with alternatives
    # provided by the testing harness.
    project_id = override_values.get("project_id", project_id)
    # [START bigquerydatatransfer_quickstart]

    # Get the full path to your project.
    parent = f"projects/{project}"
    parent = client.common_project_path(project_id)

    print('Supported Data Sources:')
    print("Supported Data Sources:")

    # Iterate over all possible data sources.
    for data_source in client.list_data_sources(parent=parent):
        print('{}:'.format(data_source.display_name))
        print('\tID: {}'.format(data_source.data_source_id))
        print('\tFull path: {}'.format(data_source.name))
        print('\tDescription: {}'.format(data_source.description))
        print("{}:".format(data_source.display_name))
        print("\tID: {}".format(data_source.data_source_id))
        print("\tFull path: {}".format(data_source.name))
        print("\tDescription: {}".format(data_source.description))
    # [END bigquerydatatransfer_quickstart]


if __name__ == '__main__':
    run_quickstart()
if __name__ == "__main__":
    run_quickstart(override_values={"project_id": sys.argv[1]})
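
Because the test harness injects its project ID through the override_values keyword argument, none of that plumbing lands between the [START]/[END] region tags used to extract the documentation snippet, and running the script as python quickstart.py your-project-id feeds sys.argv[1] into the same override. A minimal sketch of calling the function directly, assuming the working directory is samples/snippets/ and that your-project-id is a placeholder:

import quickstart

# Placeholder project ID; replace with a project that has the
# BigQuery Data Transfer API enabled.
quickstart.run_quickstart(override_values={"project_id": "your-project-id"})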
18 changes: 2 additions & 16 deletions samples/snippets/quickstart_test.py
@@ -12,24 +12,10 @@
# See the License for the specific language governing permissions and
# limitations under the License.

import os

import pytest

from . import quickstart


PROJECT = os.environ["GOOGLE_CLOUD_PROJECT"]


@pytest.fixture
def mock_project_id():
    """Mock out project and replace with project from environment."""

    return PROJECT


def test_quickstart(capsys, mock_project_id):
    quickstart.run_quickstart(mock_project_id)
def test_quickstart(capsys, project_id):
    quickstart.run_quickstart(override_values={"project_id": project_id})
    out, _ = capsys.readouterr()
    assert "Supported Data Sources:" in out
