feat: list_tables, list_projects, list_datasets, list_models, list_routines, and list_jobs now accept a page_size parameter to control page size #686

Merged · 15 commits · Jun 6, 2021
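The new keyword is passed straight through to the underlying page iterator, so callers can cap how many items each API response carries. A minimal usage sketch, not part of this diff — the dataset ID and page sizes are placeholders, and a default-configured client is assumed:

from google.cloud import bigquery

client = bigquery.Client()  # default project and credentials

# Ask the backend for at most 10 tables per API response page.
# "my_dataset" is a placeholder dataset ID.
for table in client.list_tables("my_dataset", page_size=10):
    print(table.table_id)  # pages are fetched transparently as needed

# The same keyword applies to the other listing methods, e.g.:
for dataset in client.list_datasets(page_size=50):
    print(dataset.dataset_id)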
29 changes: 29 additions & 0 deletions google/cloud/bigquery/client.py
@@ -286,6 +286,7 @@ def list_projects(
page_token: str = None,
retry: retries.Retry = DEFAULT_RETRY,
timeout: float = None,
page_size: int = None,
) -> page_iterator.Iterator:
"""List projects for the project associated with this client.

@@ -310,6 +311,10 @@ def list_projects(
The number of seconds to wait for the underlying HTTP transport
before using ``retry``.

page_size (Optional[int]):
Maximum number of projects to return in each page. If not passed,
defaults to a value set by the API.

Returns:
google.api_core.page_iterator.Iterator:
Iterator of :class:`~google.cloud.bigquery.client.Project`
@@ -335,6 +340,7 @@ def api_request(*args, **kwargs):
items_key="projects",
page_token=page_token,
max_results=max_results,
page_size=page_size,
)

def list_datasets(
@@ -346,6 +352,7 @@ def list_datasets(
page_token: str = None,
retry: retries.Retry = DEFAULT_RETRY,
timeout: float = None,
page_size: int = None,
) -> page_iterator.Iterator:
"""List datasets for the project associated with this client.

@@ -375,6 +382,8 @@ def list_datasets(
timeout (Optional[float]):
The number of seconds to wait for the underlying HTTP transport
before using ``retry``.
page_size (Optional[int]):
Maximum number of datasets to return per page.

Returns:
google.api_core.page_iterator.Iterator:
@@ -414,6 +423,7 @@ def api_request(*args, **kwargs):
page_token=page_token,
max_results=max_results,
extra_params=extra_params,
page_size=page_size,
)

def dataset(self, dataset_id: str, project: str = None) -> DatasetReference:
@@ -1270,6 +1280,7 @@ def list_models(
page_token: str = None,
retry: retries.Retry = DEFAULT_RETRY,
timeout: float = None,
page_size: int = None,
) -> page_iterator.Iterator:
"""[Beta] List models in the dataset.

@@ -1301,6 +1312,9 @@ def list_models(
timeout (Optional[float]):
The number of seconds to wait for the underlying HTTP transport
before using ``retry``.
page_size (Optional[int]):
Maximum number of models to return per page. If not
passed, defaults to a value set by the API.

Returns:
google.api_core.page_iterator.Iterator:
@@ -1331,6 +1345,7 @@ def api_request(*args, **kwargs):
items_key="models",
page_token=page_token,
max_results=max_results,
page_size=page_size,
)
result.dataset = dataset
return result
@@ -1342,6 +1357,7 @@ def list_routines(
page_token: str = None,
retry: retries.Retry = DEFAULT_RETRY,
timeout: float = None,
page_size: int = None,
) -> page_iterator.Iterator:
"""[Beta] List routines in the dataset.

@@ -1373,6 +1389,9 @@ def list_routines(
timeout (Optional[float]):
The number of seconds to wait for the underlying HTTP transport
before using ``retry``.
page_size (Optional[int]):
Maximum number of routines to return per page. If not
passed, defaults to a value set by the API.

Returns:
google.api_core.page_iterator.Iterator:
@@ -1403,6 +1422,7 @@ def api_request(*args, **kwargs):
items_key="routines",
page_token=page_token,
max_results=max_results,
page_size=page_size,
)
result.dataset = dataset
return result
@@ -1414,6 +1434,7 @@ def list_tables(
page_token: str = None,
retry: retries.Retry = DEFAULT_RETRY,
timeout: float = None,
page_size: int = None,
) -> page_iterator.Iterator:
"""List tables in the dataset.

@@ -1445,6 +1466,9 @@ def list_tables(
timeout (Optional[float]):
The number of seconds to wait for the underlying HTTP transport
before using ``retry``.
page_size (Optional[int]):
Maximum number of tables to return per page. If not
passed, defaults to a value set by the API.

Returns:
google.api_core.page_iterator.Iterator:
@@ -1474,6 +1498,7 @@ def api_request(*args, **kwargs):
items_key="tables",
page_token=page_token,
max_results=max_results,
page_size=page_size,
)
result.dataset = dataset
return result
@@ -2112,6 +2137,7 @@ def list_jobs(
timeout: float = None,
min_creation_time: datetime.datetime = None,
max_creation_time: datetime.datetime = None,
page_size: int = None,
) -> page_iterator.Iterator:
"""List jobs for the project associated with this client.

@@ -2157,6 +2183,8 @@ def list_jobs(
Max value for job creation time. If set, only jobs created
before or at this timestamp are returned. If the datetime has
no time zone assumes UTC time.
page_size (Optional[int]):
Maximum number of jobs to return per page.

Returns:
google.api_core.page_iterator.Iterator:
@@ -2208,6 +2236,7 @@ def api_request(*args, **kwargs):
page_token=page_token,
max_results=max_results,
extra_params=extra_params,
page_size=page_size,
)

def load_table_from_uri(
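Note that page_size only caps how many items each underlying API request returns; the iterator still yields every matching item. A hedged sketch of inspecting the pages themselves, assuming the same client as above (list_jobs is shown, but the other listing methods behave the same way):

# Walk the job listing one page (one API response) at a time.
jobs = client.list_jobs(page_size=25)
for page in jobs.pages:
    print("page holding", page.num_items, "jobs")  # at most 25 per page
    for job in page:
        print(job.job_id, job.state)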
2 changes: 1 addition & 1 deletion setup.py
@@ -29,7 +29,7 @@
# 'Development Status :: 5 - Production/Stable'
release_status = "Development Status :: 5 - Production/Stable"
dependencies = [
"google-api-core[grpc] >= 1.23.0, < 2.0.0dev",
"google-api-core[grpc] >= 1.29.0, < 2.0.0dev",
"proto-plus >= 1.10.0",
"google-cloud-core >= 1.4.1, < 2.0dev",
"google-resumable-media >= 0.6.0, < 2.0dev",
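Raising the google-api-core floor to 1.29.0 (mirrored in testing/constraints-3.6.txt below) appears to be what makes the page_size argument available on page_iterator.HTTPIterator, which the client changes above pass it through to.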
2 changes: 1 addition & 1 deletion testing/constraints-3.6.txt
@@ -5,7 +5,7 @@
#
# e.g., if setup.py has "foo >= 1.14.0, < 2.0.0dev",
# Then this file should have foo==1.14.0
google-api-core==1.23.0
google-api-core==1.29.0
google-cloud-bigquery-storage==2.0.0
google-cloud-core==1.4.1
google-resumable-media==0.6.0