Skip to content

Commit

Permalink
docs: update snippets samples to support version 2.0 (#309)
Browse files Browse the repository at this point in the history
* docs: update snippets samples to support version 2.0

For an unknown reason, old versions of the google-cloud-bigquery-storage
library were still being used. This commit pins those dependencies
directly instead.

Also, updates the samples to remove warnings about `client.dataset`.

* blacken
  • Loading branch information
tswast committed Oct 9, 2020
1 parent 1a4dc12 commit 61634be
Show file tree
Hide file tree
Showing 6 changed files with 25 additions and 11 deletions.
8 changes: 6 additions & 2 deletions samples/snippets/authorized_view_tutorial.py
Expand Up @@ -27,16 +27,18 @@ def run_authorized_view_tutorial(override_values={}):

client = bigquery.Client()
source_dataset_id = "github_source_data"
source_dataset_id_full = "{}.{}".format(client.project, source_dataset_id)

# [END bigquery_authorized_view_tutorial]
# [END bigquery_avt_create_source_dataset]
# To facilitate testing, we replace values with alternatives
# provided by the testing harness.
source_dataset_id = override_values.get("source_dataset_id", source_dataset_id)
source_dataset_id_full = "{}.{}".format(client.project, source_dataset_id)
# [START bigquery_authorized_view_tutorial]
# [START bigquery_avt_create_source_dataset]

source_dataset = bigquery.Dataset(client.dataset(source_dataset_id))
source_dataset = bigquery.Dataset(source_dataset_id_full)
# Specify the geographic location where the dataset should reside.
source_dataset.location = "US"
source_dataset = client.create_dataset(source_dataset) # API request
Expand Down Expand Up @@ -66,16 +68,18 @@ def run_authorized_view_tutorial(override_values={}):
# Create a separate dataset to store your view
# [START bigquery_avt_create_shared_dataset]
shared_dataset_id = "shared_views"
shared_dataset_id_full = "{}.{}".format(client.project, shared_dataset_id)

# [END bigquery_authorized_view_tutorial]
# [END bigquery_avt_create_shared_dataset]
# To facilitate testing, we replace values with alternatives
# provided by the testing harness.
shared_dataset_id = override_values.get("shared_dataset_id", shared_dataset_id)
shared_dataset_id_full = "{}.{}".format(client.project, shared_dataset_id)
# [START bigquery_authorized_view_tutorial]
# [START bigquery_avt_create_shared_dataset]

shared_dataset = bigquery.Dataset(client.dataset(shared_dataset_id))
shared_dataset = bigquery.Dataset(shared_dataset_id_full)
shared_dataset.location = "US"
shared_dataset = client.create_dataset(shared_dataset) # API request
# [END bigquery_avt_create_shared_dataset]
Expand Down
10 changes: 7 additions & 3 deletions samples/snippets/authorized_view_tutorial_test.py
Expand Up @@ -30,7 +30,7 @@ def datasets_to_delete(client):
doomed = []
yield doomed
for item in doomed:
client.delete_dataset(item, delete_contents=True)
client.delete_dataset(item, delete_contents=True, not_found_ok=True)


def test_authorized_view_tutorial(client, datasets_to_delete):
Expand All @@ -42,8 +42,12 @@ def test_authorized_view_tutorial(client, datasets_to_delete):
str(uuid.uuid4()).replace("-", "_")
),
}
source_dataset_ref = client.dataset(override_values["source_dataset_id"])
shared_dataset_ref = client.dataset(override_values["shared_dataset_id"])
source_dataset_ref = "{}.{}".format(
client.project, override_values["source_dataset_id"]
)
shared_dataset_ref = "{}.{}".format(
client.project, override_values["shared_dataset_id"]
)
datasets_to_delete.extend(
[override_values["source_dataset_id"], override_values["shared_dataset_id"]]
)
Expand Down
4 changes: 3 additions & 1 deletion samples/snippets/natality_tutorial.py
Expand Up @@ -38,13 +38,15 @@ def run_natality_tutorial(override_values={}):

# Prepare a reference to a new dataset for storing the query results.
dataset_id = "natality_regression"
dataset_id_full = "{}.{}".format(client.project, dataset_id)
# [END bigquery_query_natality_tutorial]
# To facilitate testing, we replace values with alternatives
# provided by the testing harness.
dataset_id = override_values.get("dataset_id", dataset_id)
dataset_id_full = "{}.{}".format(client.project, dataset_id)
# [START bigquery_query_natality_tutorial]

dataset = bigquery.Dataset(client.dataset(dataset_id))
dataset = bigquery.Dataset(dataset_id_full)

# Create the new BigQuery dataset.
dataset = client.create_dataset(dataset)
Expand Down
4 changes: 2 additions & 2 deletions samples/snippets/natality_tutorial_test.py
Expand Up @@ -43,8 +43,8 @@ def test_natality_tutorial(client, datasets_to_delete):

natality_tutorial.run_natality_tutorial(override_values)

table_ref = bigquery.Dataset(client.dataset(override_values["dataset_id"])).table(
"regression_input"
table_ref = "{}.{}.{}".format(
client.project, override_values["dataset_id"], "regression_input"
)
table = client.get_table(table_ref)
assert table.num_rows > 0
4 changes: 2 additions & 2 deletions samples/snippets/quickstart.py
Expand Up @@ -33,8 +33,8 @@ def run_quickstart(override_values={}):
# [START bigquery_quickstart]

# Prepares a reference to the new dataset
dataset_ref = bigquery_client.dataset(dataset_id)
dataset = bigquery.Dataset(dataset_ref)
dataset_id_full = "{}.{}".format(bigquery_client.project, dataset_id)
dataset = bigquery.Dataset(dataset_id_full)

# Creates the new dataset
dataset = bigquery_client.create_dataset(dataset)
Expand Down
6 changes: 5 additions & 1 deletion samples/snippets/requirements.txt
@@ -1,6 +1,10 @@
google-cloud-bigquery[pandas,bqstorage,pyarrow]==2.0.0
google-cloud-bigquery==2.0.0
google-cloud-bigquery-storage==2.0.0
google-auth-oauthlib==0.4.1
grpcio==1.32.0
ipython==7.16.1; python_version < '3.7'
ipython==7.17.0; python_version >= '3.7'
matplotlib==3.3.1
pandas==1.1.3
pyarrow==1.0.1
pytz==2020.1

0 comments on commit 61634be

Please sign in to comment.