
Commit

docs: add sample to delete job metadata (#798)
tswast committed Jul 22, 2021
1 parent 46e65a6 commit be9b242
Showing 4 changed files with 102 additions and 17 deletions.
25 changes: 25 additions & 0 deletions samples/snippets/conftest.py
@@ -50,6 +50,31 @@ def dataset_id(bigquery_client: bigquery.Client, project_id: str):
    bigquery_client.delete_dataset(dataset, delete_contents=True, not_found_ok=True)


@pytest.fixture(scope="session")
def dataset_id_us_east1(bigquery_client: bigquery.Client, project_id: str):
    dataset_id = prefixer.create_prefix()
    full_dataset_id = f"{project_id}.{dataset_id}"
    dataset = bigquery.Dataset(full_dataset_id)
    dataset.location = "us-east1"
    bigquery_client.create_dataset(dataset)
    yield dataset_id
    bigquery_client.delete_dataset(dataset, delete_contents=True, not_found_ok=True)


@pytest.fixture(scope="session")
def table_id_us_east1(
    bigquery_client: bigquery.Client, project_id: str, dataset_id_us_east1: str
):
    table_id = prefixer.create_prefix()
    full_table_id = f"{project_id}.{dataset_id_us_east1}.{table_id}"
    table = bigquery.Table(
        full_table_id, schema=[bigquery.SchemaField("string_col", "STRING")]
    )
    bigquery_client.create_table(table)
    yield full_table_id
    bigquery_client.delete_table(table, not_found_ok=True)


@pytest.fixture
def random_table_id(bigquery_client: bigquery.Client, project_id: str, dataset_id: str):
"""Create a new table ID each time, so random_table_id can be used as
44 changes: 44 additions & 0 deletions samples/snippets/delete_job.py
@@ -0,0 +1,44 @@
# Copyright 2021 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.


def delete_job_metadata(job_id: str, location: str):
    orig_job_id = job_id
    orig_location = location
    # [START bigquery_delete_job]
    from google.cloud import bigquery
    from google.api_core import exceptions

    # TODO(developer): Set the job ID to the ID of the job whose metadata you
    # wish to delete.
    job_id = "abcd-efgh-ijkl-mnop"

    # TODO(developer): Set the location to the region or multi-region
    # containing the job.
    location = "us-east1"

    # [END bigquery_delete_job]
    job_id = orig_job_id
    location = orig_location

    # [START bigquery_delete_job]
    client = bigquery.Client()

    client.delete_job_metadata(job_id, location=location)

    try:
        client.get_job(job_id, location=location)
    except exceptions.NotFound:
        print(f"Job metadata for job {location}:{job_id} was deleted.")
    # [END bigquery_delete_job]
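
For reference, a minimal sketch of calling the new sample directly, assuming it is run from the samples/snippets directory and that the job has already finished; the job ID below is the same placeholder used in the sample, not a real job:

    # Minimal sketch: invoke the sample above with a placeholder job ID.
    from delete_job import delete_job_metadata

    delete_job_metadata("abcd-efgh-ijkl-mnop", "us-east1")
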
33 changes: 33 additions & 0 deletions samples/snippets/delete_job_test.py
@@ -0,0 +1,33 @@
# Copyright 2021 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

from google.cloud import bigquery

import delete_job


def test_delete_job_metadata(
    capsys, bigquery_client: bigquery.Client, table_id_us_east1: str
):
    query_job: bigquery.QueryJob = bigquery_client.query(
        f"SELECT COUNT(*) FROM `{table_id_us_east1}`", location="us-east1",
    )
    query_job.result()
    assert query_job.job_id is not None

    delete_job.delete_job_metadata(query_job.job_id, "us-east1")

    out, _ = capsys.readouterr()
    assert "deleted" in out
    assert f"us-east1:{query_job.job_id}" in out
17 changes: 0 additions & 17 deletions tests/system/test_client.py
@@ -63,7 +63,6 @@
from google.cloud import bigquery_v2
from google.cloud.bigquery.dataset import Dataset
from google.cloud.bigquery.dataset import DatasetReference
from google.cloud.bigquery.schema import SchemaField
from google.cloud.bigquery.table import Table
from google.cloud._helpers import UTC
from google.cloud.bigquery import dbapi, enums
@@ -506,22 +505,6 @@ def test_delete_dataset_delete_contents_false(self):
        with self.assertRaises(exceptions.BadRequest):
            Config.CLIENT.delete_dataset(dataset)

    def test_delete_job_metadata(self):
        dataset_id = _make_dataset_id("us_east1")
        self.temp_dataset(dataset_id, location="us-east1")
        full_table_id = f"{Config.CLIENT.project}.{dataset_id}.test_delete_job_metadata"
        table = Table(full_table_id, schema=[SchemaField("col", "STRING")])
        Config.CLIENT.create_table(table)
        query_job: bigquery.QueryJob = Config.CLIENT.query(
            f"SELECT COUNT(*) FROM `{full_table_id}`", location="us-east1",
        )
        query_job.result()
        self.assertIsNotNone(Config.CLIENT.get_job(query_job))

        Config.CLIENT.delete_job_metadata(query_job)
        with self.assertRaises(NotFound):
            Config.CLIENT.get_job(query_job)

    def test_get_table_w_public_dataset(self):
        public = "bigquery-public-data"
        dataset_id = "samples"
