feat: accept DatasetListItem where DatasetReference is accepted #597

Merged
merged 20 commits into from Apr 12, 2021

Changes from all commits

61 changes: 26 additions & 35 deletions google/cloud/bigquery/client.py
@@ -449,6 +449,22 @@ def _create_bqstorage_client(self):

return bigquery_storage.BigQueryReadClient(credentials=self._credentials)

def _dataset_from_arg(self, dataset):
if isinstance(dataset, str):
dataset = DatasetReference.from_string(
dataset, default_project=self.project
)

if not isinstance(dataset, (Dataset, DatasetReference)):
if isinstance(dataset, DatasetListItem):
dataset = dataset.reference
else:
raise TypeError(
"dataset must be a Dataset, DatasetReference, DatasetListItem,"
" or string"
)
return dataset

def create_dataset(
self, dataset, exists_ok=False, retry=DEFAULT_RETRY, timeout=None
):
@@ -461,6 +477,7 @@ def create_dataset(
dataset (Union[ \
google.cloud.bigquery.dataset.Dataset, \
google.cloud.bigquery.dataset.DatasetReference, \
google.cloud.bigquery.dataset.DatasetListItem, \
str, \
]):
A :class:`~google.cloud.bigquery.dataset.Dataset` to create.
@@ -491,10 +508,7 @@ def create_dataset(
>>> dataset = client.create_dataset(dataset)

"""
if isinstance(dataset, str):
dataset = DatasetReference.from_string(
dataset, default_project=self.project
)
dataset = self._dataset_from_arg(dataset)
if isinstance(dataset, DatasetReference):
dataset = Dataset(dataset)

@@ -1133,6 +1147,7 @@ def list_models(
dataset (Union[ \
google.cloud.bigquery.dataset.Dataset, \
google.cloud.bigquery.dataset.DatasetReference, \
google.cloud.bigquery.dataset.DatasetListItem, \
str, \
]):
A reference to the dataset whose models to list from the
@@ -1160,13 +1175,7 @@ def list_models(
:class:`~google.cloud.bigquery.model.Model` contained
within the requested dataset.
"""
if isinstance(dataset, str):
dataset = DatasetReference.from_string(
dataset, default_project=self.project
)

if not isinstance(dataset, (Dataset, DatasetReference)):
raise TypeError("dataset must be a Dataset, DatasetReference, or string")
dataset = self._dataset_from_arg(dataset)

path = "%s/models" % dataset.path
span_attributes = {"path": path}
@@ -1210,6 +1219,7 @@ def list_routines(
dataset (Union[ \
google.cloud.bigquery.dataset.Dataset, \
google.cloud.bigquery.dataset.DatasetReference, \
google.cloud.bigquery.dataset.DatasetListItem, \
str, \
]):
A reference to the dataset whose routines to list from the
@@ -1237,14 +1247,7 @@ def list_routines(
:class:`~google.cloud.bigquery.routine.Routine`s contained
within the requested dataset, limited by ``max_results``.
"""
if isinstance(dataset, str):
dataset = DatasetReference.from_string(
dataset, default_project=self.project
)

if not isinstance(dataset, (Dataset, DatasetReference)):
raise TypeError("dataset must be a Dataset, DatasetReference, or string")

dataset = self._dataset_from_arg(dataset)
path = "{}/routines".format(dataset.path)

span_attributes = {"path": path}
@@ -1288,6 +1291,7 @@ def list_tables(
dataset (Union[ \
google.cloud.bigquery.dataset.Dataset, \
google.cloud.bigquery.dataset.DatasetReference, \
google.cloud.bigquery.dataset.DatasetListItem, \
str, \
]):
A reference to the dataset whose tables to list from the
@@ -1315,14 +1319,7 @@ def list_tables(
:class:`~google.cloud.bigquery.table.TableListItem` contained
within the requested dataset.
"""
if isinstance(dataset, str):
dataset = DatasetReference.from_string(
dataset, default_project=self.project
)

if not isinstance(dataset, (Dataset, DatasetReference)):
raise TypeError("dataset must be a Dataset, DatasetReference, or string")

dataset = self._dataset_from_arg(dataset)
path = "%s/tables" % dataset.path
span_attributes = {"path": path}

@@ -1365,6 +1362,7 @@ def delete_dataset(
dataset (Union[ \
google.cloud.bigquery.dataset.Dataset, \
google.cloud.bigquery.dataset.DatasetReference, \
google.cloud.bigquery.dataset.DatasetListItem, \
str, \
]):
A reference to the dataset to delete. If a string is passed
@@ -1384,14 +1382,7 @@ def delete_dataset(
Defaults to ``False``. If ``True``, ignore "not found" errors
when deleting the dataset.
"""
if isinstance(dataset, str):
dataset = DatasetReference.from_string(
dataset, default_project=self.project
)

if not isinstance(dataset, (Dataset, DatasetReference)):
raise TypeError("dataset must be a Dataset or a DatasetReference")

dataset = self._dataset_from_arg(dataset)
params = {}
path = dataset.path
if delete_contents:
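
With this change, every public client method that accepts a dataset argument funnels it through the shared _dataset_from_arg helper, so plain strings, Dataset, DatasetReference, and DatasetListItem values are all normalized before the request path is built. A minimal sketch of the call patterns this enables is shown below; the project and dataset names are placeholders, and constructing the client assumes default credentials are available.

from google.cloud import bigquery

client = bigquery.Client()

# DatasetListItem objects yielded by list_datasets() can now be passed
# directly, without unwrapping item.reference first.
for item in client.list_datasets():
    for table in client.list_tables(item):
        print(table.table_id)

# Strings and explicit references keep working exactly as before.
client.list_tables("my-project.my_dataset")
client.list_tables(bigquery.DatasetReference("my-project", "my_dataset"))
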
23 changes: 23 additions & 0 deletions tests/unit/conftest.py
@@ -0,0 +1,23 @@
import pytest

from .helpers import make_client


@pytest.fixture
def client():
yield make_client()


@pytest.fixture
def PROJECT():
yield "PROJECT"


@pytest.fixture
def DS_ID():
yield "DATASET_ID"


@pytest.fixture
def LOCATION():
yield "us-central"
49 changes: 49 additions & 0 deletions tests/unit/helpers.py
@@ -12,6 +12,11 @@
# See the License for the specific language governing permissions and
# limitations under the License.

import google.auth.credentials
import google.cloud.bigquery.client
import google.cloud.bigquery.dataset
import mock
import pytest


def make_connection(*responses):
import google.cloud.bigquery._http
@@ -31,3 +36,47 @@ def _to_pyarrow(value):
import pyarrow

return pyarrow.array([value])[0]


def make_client(project="PROJECT", **kw):
credentials = mock.Mock(spec=google.auth.credentials.Credentials)
return google.cloud.bigquery.client.Client(project, credentials, **kw)


def make_dataset_reference_string(project, ds_id):
return f"{project}.{ds_id}"


def make_dataset(project, ds_id):
return google.cloud.bigquery.dataset.Dataset(
google.cloud.bigquery.dataset.DatasetReference(project, ds_id)
)


def make_dataset_list_item(project, ds_id):
return google.cloud.bigquery.dataset.DatasetListItem(
dict(datasetReference=dict(projectId=project, datasetId=ds_id))
)


def identity(x):
return x


def get_reference(x):
return x.reference


dataset_like = [
(google.cloud.bigquery.dataset.DatasetReference, identity),
(make_dataset, identity),
(make_dataset_list_item, get_reference),
(
make_dataset_reference_string,
google.cloud.bigquery.dataset.DatasetReference.from_string,
),
]

dataset_polymorphic = pytest.mark.parametrize(
"make_dataset,get_reference", dataset_like
)
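
dataset_polymorphic parametrizes a test over the four dataset-like representations listed in dataset_like: each entry pairs a factory (make_dataset) with a callable that recovers a comparable reference (get_reference), so one test body is executed once per accepted argument type. A hypothetical usage sketch, not part of this diff:

from .helpers import dataset_polymorphic


@dataset_polymorphic
def test_dataset_argument_shapes(make_dataset, get_reference, PROJECT, DS_ID):
    # Runs four times: DatasetReference, Dataset, DatasetListItem, and the
    # "project.dataset_id" string form.
    dataset = make_dataset(PROJECT, DS_ID)
    reference = get_reference(dataset)
    assert reference.project == PROJECT
    assert reference.dataset_id == DS_ID
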