Skip to content
This repository has been archived by the owner on Jul 6, 2023. It is now read-only.

Commit

Permalink
samples: export data to BigQuery (#45)
Browse files Browse the repository at this point in the history
  • Loading branch information
TrucHLe committed Sep 13, 2021
1 parent 9177b62 commit 335cc97
Show file tree
Hide file tree
Showing 5 changed files with 134 additions and 20 deletions.
42 changes: 42 additions & 0 deletions samples/snippets/export_to_bigquery.py
@@ -0,0 +1,42 @@
# Copyright 2021 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# [START contactcenterinsights_export_to_bigquery]
from google.cloud import contact_center_insights_v1


def export_to_bigquery(
    project_id: str,
    bigquery_project_id: str,
    bigquery_dataset_id: str,
    bigquery_table_id: str,
    timeout: int = 600000,
) -> None:
    """Export Contact Center Insights data to a BigQuery table.

    Args:
        project_id: The Google Cloud project that owns the Insights data.
        bigquery_project_id: The project that owns the destination table.
        bigquery_dataset_id: The destination BigQuery dataset ID.
        bigquery_table_id: The destination BigQuery table ID.
        timeout: Seconds to wait for the export operation to complete.
            NOTE(review): google.api_core Operation.result() takes seconds,
            so the historical default of 600000 (~7 days) looks like a
            milliseconds value (10 minutes) — confirm the intended unit.
    """
    # Construct an export request. The sample hard-codes the
    # "us-central1" location for the Insights parent resource.
    request = contact_center_insights_v1.ExportInsightsDataRequest()
    request.parent = contact_center_insights_v1.ContactCenterInsightsClient.common_location_path(
        project_id, "us-central1"
    )
    request.big_query_destination.project_id = bigquery_project_id
    request.big_query_destination.dataset = bigquery_dataset_id
    request.big_query_destination.table = bigquery_table_id
    # Only export conversations handled by agent "007".
    request.filter = 'agent_id="007"'

    # Call the Insights client to export data to BigQuery and block
    # until the long-running operation finishes (or the timeout expires).
    insights_client = contact_center_insights_v1.ContactCenterInsightsClient()
    export_operation = insights_client.export_insights_data(request=request)
    export_operation.result(timeout=timeout)
    print("Exported data to BigQuery")


# [END contactcenterinsights_export_to_bigquery]
44 changes: 25 additions & 19 deletions samples/snippets/noxfile.py
Expand Up @@ -39,31 +39,29 @@

TEST_CONFIG = {
# You can opt out from the test for specific Python versions.
'ignored_versions': [],

"ignored_versions": [],
# Old samples are opted out of enforcing Python type hints
# All new samples should feature them
'enforce_type_hints': False,

"enforce_type_hints": False,
# An envvar key for determining the project id to use. Change it
# to 'BUILD_SPECIFIC_GCLOUD_PROJECT' if you want to opt in using a
# build specific Cloud project. You can also use your own string
# to use your own Cloud project.
'gcloud_project_env': 'GOOGLE_CLOUD_PROJECT',
"gcloud_project_env": "GOOGLE_CLOUD_PROJECT",
# 'gcloud_project_env': 'BUILD_SPECIFIC_GCLOUD_PROJECT',
# If you need to use a specific version of pip,
# change pip_version_override to the string representation
# of the version number, for example, "20.2.4"
"pip_version_override": None,
# A dictionary you want to inject into your test. Don't put any
# secrets here. These values will override predefined values.
'envs': {},
"envs": {},
}


try:
# Ensure we can import noxfile_config in the project's directory.
sys.path.append('.')
sys.path.append(".")
from noxfile_config import TEST_CONFIG_OVERRIDE
except ImportError as e:
print("No user noxfile_config found: detail: {}".format(e))
Expand All @@ -78,12 +76,12 @@ def get_pytest_env_vars() -> Dict[str, str]:
ret = {}

# Override the GCLOUD_PROJECT and the alias.
env_key = TEST_CONFIG['gcloud_project_env']
env_key = TEST_CONFIG["gcloud_project_env"]
# This should error out if not set.
ret['GOOGLE_CLOUD_PROJECT'] = os.environ[env_key]
ret["GOOGLE_CLOUD_PROJECT"] = os.environ[env_key]

# Apply user supplied envs.
ret.update(TEST_CONFIG['envs'])
ret.update(TEST_CONFIG["envs"])
return ret


Expand All @@ -92,11 +90,14 @@ def get_pytest_env_vars() -> Dict[str, str]:
ALL_VERSIONS = ["3.6", "3.7", "3.8", "3.9"]

# Any default versions that should be ignored.
IGNORED_VERSIONS = TEST_CONFIG['ignored_versions']
IGNORED_VERSIONS = TEST_CONFIG["ignored_versions"]

TESTED_VERSIONS = sorted([v for v in ALL_VERSIONS if v not in IGNORED_VERSIONS])

INSTALL_LIBRARY_FROM_SOURCE = os.environ.get("INSTALL_LIBRARY_FROM_SOURCE", False) in ("True", "true")
INSTALL_LIBRARY_FROM_SOURCE = os.environ.get("INSTALL_LIBRARY_FROM_SOURCE", False) in (
"True",
"true",
)
#
# Style Checks
#
Expand Down Expand Up @@ -141,7 +142,7 @@ def _determine_local_import_names(start_dir: str) -> List[str]:

@nox.session
def lint(session: nox.sessions.Session) -> None:
if not TEST_CONFIG['enforce_type_hints']:
if not TEST_CONFIG["enforce_type_hints"]:
session.install("flake8", "flake8-import-order")
else:
session.install("flake8", "flake8-import-order", "flake8-annotations")
Expand All @@ -150,9 +151,11 @@ def lint(session: nox.sessions.Session) -> None:
args = FLAKE8_COMMON_ARGS + [
"--application-import-names",
",".join(local_names),
"."
".",
]
session.run("flake8", *args)


#
# Black
#
Expand All @@ -165,6 +168,7 @@ def blacken(session: nox.sessions.Session) -> None:

session.run("black", *python_files)


#
# Sample Tests
#
Expand All @@ -173,7 +177,9 @@ def blacken(session: nox.sessions.Session) -> None:
PYTEST_COMMON_ARGS = ["--junitxml=sponge_log.xml"]


def _session_tests(session: nox.sessions.Session, post_install: Callable = None) -> None:
def _session_tests(
session: nox.sessions.Session, post_install: Callable = None
) -> None:
if TEST_CONFIG["pip_version_override"]:
pip_version = TEST_CONFIG["pip_version_override"]
session.install(f"pip=={pip_version}")
Expand Down Expand Up @@ -203,7 +209,7 @@ def _session_tests(session: nox.sessions.Session, post_install: Callable = None)
# on travis where slow and flaky tests are excluded.
# See http://doc.pytest.org/en/latest/_modules/_pytest/main.html
success_codes=[0, 5],
env=get_pytest_env_vars()
env=get_pytest_env_vars(),
)


Expand All @@ -213,9 +219,9 @@ def py(session: nox.sessions.Session) -> None:
if session.python in TESTED_VERSIONS:
_session_tests(session)
else:
session.skip("SKIPPED: {} tests are disabled for this sample.".format(
session.python
))
session.skip(
"SKIPPED: {} tests are disabled for this sample.".format(session.python)
)


#
Expand Down
1 change: 1 addition & 0 deletions samples/snippets/requirements.txt
@@ -1,2 +1,3 @@
google-api-core==2.0.1
google-cloud-bigquery==2.26.0
google-cloud-contact-center-insights==0.2.0
2 changes: 1 addition & 1 deletion samples/snippets/test_enable_pubsub_notifications.py
Expand Up @@ -70,7 +70,7 @@ def disable_pubsub_notifications(project_id):


def test_enable_pubsub_notifications(
capsys, project_id, pubsub_topics, disable_pubsub_notifications
capsys, project_id, pubsub_topics, disable_pubsub_notifications
):
conversation_topic, analysis_topic = pubsub_topics

Expand Down
65 changes: 65 additions & 0 deletions samples/snippets/test_export_to_bigquery.py
@@ -0,0 +1,65 @@
# Copyright 2021 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import uuid

import google.auth

from google.cloud import bigquery

import pytest

import export_to_bigquery

GCLOUD_TESTS_PREFIX = "python_samples_tests"


@pytest.fixture
def project_id():
    """Resolve the default Google Cloud project ID from the environment."""
    _credentials, default_project = google.auth.default()
    return default_project


@pytest.fixture
def unique_id():
    """Build a test-scoped resource ID with a random 8-hex-char suffix."""
    suffix = uuid.uuid4().hex[:8]
    return "{}_{}".format(GCLOUD_TESTS_PREFIX, suffix)


@pytest.fixture
def bigquery_resources(project_id, unique_id):
    """Create a throwaway BigQuery dataset and table; tear both down after.

    Yields:
        A (dataset_id, table_id) pair, both equal to the unique test ID.
    """
    client = bigquery.Client()
    dataset_id = unique_id
    table_id = unique_id

    # Set up: a dataset in the US multi-region, then a table inside it.
    new_dataset = bigquery.Dataset("{}.{}".format(project_id, dataset_id))
    new_dataset.location = "US"
    client.create_dataset(new_dataset, timeout=30)

    new_table = bigquery.Table(
        "{}.{}.{}".format(project_id, dataset_id, table_id)
    )
    client.create_table(new_table)

    yield dataset_id, table_id

    # Tear down: deleting the dataset with delete_contents=True also
    # removes the table created above.
    client.delete_dataset(dataset_id, delete_contents=True)


def test_export_data_to_bigquery(capsys, project_id, bigquery_resources):
    """The export sample should report success on stdout."""
    dataset_id, table_id = bigquery_resources
    export_to_bigquery.export_to_bigquery(
        project_id, project_id, dataset_id, table_id
    )
    captured = capsys.readouterr()
    assert "Exported data to BigQuery" in captured.out

0 comments on commit 335cc97

Please sign in to comment.