# Copyright 2021 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

# Image for the austin_waste CSV-transform job, run as a KubernetesPodOperator pod.

# The base image for this build
FROM python:3.8

# Allow statements and log messages to appear immediately in Cloud logs
# (disables Python's stdout/stderr buffering)
ENV PYTHONUNBUFFERED True

# Copy the requirements file into the image
COPY requirements.txt ./

# Install the packages specified in the requirements file
RUN python3 -m pip install --no-cache-dir -r requirements.txt

# The WORKDIR instruction sets the working directory for any RUN, CMD,
# ENTRYPOINT, COPY and ADD instructions that follow it in the Dockerfile.
# If the WORKDIR doesn't exist, it will be created even if it's not used in
# any subsequent Dockerfile instruction
WORKDIR /custom

# Copy the specific data processing script/s in the image under /custom/*
COPY ./csv_transform.py .

# Command to run the data processing script when the container is run
CMD ["python3", "csv_transform.py"]
# Copyright 2021 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

"""Transform the Austin waste-and-diversion source CSV and stage it in GCS.

Downloads the city's CSV export, renames and reorders the columns, normalizes
the date/timestamp columns to BigQuery-friendly formats, and uploads the
result to a Cloud Storage path for the downstream GCS->BQ load task.
All configuration arrives via environment variables (see ``__main__`` below).
"""

import json
import logging
import os
import pathlib
import typing
from datetime import datetime

import pandas as pd
import requests
from google.cloud import storage


def main(
    source_url: str,
    source_file: pathlib.Path,
    target_file: pathlib.Path,
    target_gcs_bucket: str,
    target_gcs_path: str,
    headers: typing.List[str],
    rename_mappings: dict,
) -> None:
    """Run the download -> transform -> upload pipeline end to end.

    Args:
        source_url: HTTP(S) URL of the source CSV.
        source_file: Local path the raw download is written to.
        target_file: Local path the transformed CSV is written to.
        target_gcs_bucket: Destination GCS bucket name.
        target_gcs_path: Destination object path within the bucket.
        headers: Output column order (post-rename names).
        rename_mappings: Source-column -> output-column name map.
    """
    logging.info("Creating 'files' folder")
    pathlib.Path("./files").mkdir(parents=True, exist_ok=True)

    logging.info(f"Downloading file {source_url}")
    download_file(source_url, source_file)
    df = pd.read_csv(str(source_file))

    logging.info(f"Transforming.. {source_file}")

    logging.info("Transform: Renaming headers...")
    rename_headers(df, rename_mappings)

    logging.info("Transform: Converting the date format...")
    df["report_date"] = df["report_date"].apply(
        lambda x: datetime.strptime(x, "%m/%d/%Y").strftime("%Y-%m-%d")
    )
    # Bug fix: the source timestamps carry an AM/PM marker, so the hour must
    # be parsed with %I (12-hour clock). The previous %H (24-hour) directive
    # made strptime ignore %p, shifting every PM load_time 12 hours early.
    df["load_time"] = df["load_time"].apply(
        lambda x: datetime.strptime(x, "%m/%d/%Y %I:%M:%S %p").strftime(
            "%Y-%m-%d %H:%M:%S"
        )
    )

    logging.info("Transform: Reordering headers..")
    df = df[headers]

    logging.info(f"Saving to output file.. {target_file}")
    try:
        save_to_new_file(df, file_path=str(target_file))
    except Exception as e:
        logging.error(f"Error saving output file: {e}.")
    logging.info("..Done!")

    logging.info(
        f"Uploading output file to.. gs://{target_gcs_bucket}/{target_gcs_path}"
    )
    upload_file_to_gcs(target_file, target_gcs_bucket, target_gcs_path)


def rename_headers(df: pd.DataFrame, rename_mappings: dict) -> None:
    """Rename the columns of *df* in place.

    Note: ``inplace=True`` mutates *df* and returns ``None``; the original
    ``df = df.rename(...)`` rebinding was misleading and has been dropped.
    """
    df.rename(columns=rename_mappings, inplace=True)


def save_to_new_file(df: pd.DataFrame, file_path: str) -> None:
    """Write *df* to *file_path* as CSV, without the pandas index column."""
    df.to_csv(file_path, index=False)


def download_file(source_url: str, source_file: pathlib.Path) -> None:
    """Stream *source_url* into *source_file*.

    Raises:
        requests.HTTPError: on a non-2xx response. Failing fast here replaces
        the old log-and-continue behavior, which left a missing/empty file
        for the later ``pd.read_csv`` to fail on with a confusing error.
    """
    logging.info(f"Downloading {source_url} into {source_file}")
    r = requests.get(source_url, stream=True)
    r.raise_for_status()
    with open(source_file, "wb") as f:
        for chunk in r.iter_content(chunk_size=8192):
            f.write(chunk)


def upload_file_to_gcs(file_path: pathlib.Path, gcs_bucket: str, gcs_path: str) -> None:
    """Upload the local *file_path* to ``gs://<gcs_bucket>/<gcs_path>``."""
    storage_client = storage.Client()
    bucket = storage_client.bucket(gcs_bucket)
    blob = bucket.blob(gcs_path)
    blob.upload_from_filename(file_path)


if __name__ == "__main__":
    logging.getLogger().setLevel(logging.INFO)

    main(
        source_url=os.environ["SOURCE_URL"],
        source_file=pathlib.Path(os.environ["SOURCE_FILE"]).expanduser(),
        target_file=pathlib.Path(os.environ["TARGET_FILE"]).expanduser(),
        target_gcs_bucket=os.environ["TARGET_GCS_BUCKET"],
        target_gcs_path=os.environ["TARGET_GCS_PATH"],
        headers=json.loads(os.environ["CSV_HEADERS"]),
        rename_mappings=json.loads(os.environ["RENAME_MAPPINGS"]),
    )
/**
 * Copyright 2021 Google LLC
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

# ---- datasets/austin_waste/_terraform/austin_waste_dataset.tf ----
# BigQuery dataset that holds the waste_and_diversion table.

resource "google_bigquery_dataset" "austin_waste" {
  dataset_id  = "austin_waste"
  project     = var.project_id
  description = "austin waste and diversion"
}

output "bigquery_dataset-austin_waste-dataset_id" {
  value = google_bigquery_dataset.austin_waste.dataset_id
}

# ---- datasets/austin_waste/_terraform/provider.tf ----
# Google provider configured to impersonate the deployment service account.

provider "google" {
  project                     = var.project_id
  impersonate_service_account = var.impersonating_acct
  region                      = var.region
}

data "google_client_openid_userinfo" "me" {}

output "impersonating-account" {
  value = data.google_client_openid_userinfo.me.email
}

# ---- datasets/austin_waste/_terraform/variables.tf ----
# Inputs supplied by the shared deployment pipeline.

variable "project_id" {}
variable "bucket_name_prefix" {}
variable "impersonating_acct" {}
variable "region" {}
variable "env" {}

# ---- datasets/austin_waste/_terraform/waste_and_diversion_pipeline.tf ----
# Destination table; schema is applied by the Airflow load job, not Terraform.

resource "google_bigquery_table" "waste_and_diversion" {
  project    = var.project_id
  dataset_id = "austin_waste"
  table_id   = "waste_and_diversion"

  description = "Austin waste and diversion"

  depends_on = [
    google_bigquery_dataset.austin_waste
  ]
}

output "bigquery_table-waste_and_diversion-table_id" {
  value = google_bigquery_table.waste_and_diversion.table_id
}

output "bigquery_table-waste_and_diversion-id" {
  value = google_bigquery_table.waste_and_diversion.id
}
# Copyright 2021 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

# ---- datasets/austin_waste/dataset.yaml ----
# Dataset-level metadata plus the BigQuery dataset resource.

dataset:
  name: austin_waste
  friendly_name: austin waste and diversion
  description: austin-based datasets
  dataset_sources: ~
  terms_of_use: ~


resources:
  - type: bigquery_dataset
    dataset_id: austin_waste
    description: austin waste and diversion

# ---- datasets/austin_waste/waste_and_diversion/pipeline.yaml ----
# Pipeline spec from which waste_and_diversion_dag.py is generated.

---
resources:

  - type: bigquery_table
    table_id: waste_and_diversion
    description: "Austin waste and diversion"

dag:
  airflow_version: 1
  initialize:
    dag_id: waste_and_diversion
    default_args:
      owner: "Google"
      depends_on_past: False
      start_date: '2021-03-01'
    max_active_runs: 1
    schedule_interval: "@daily"
    catchup: False
    default_view: graph
  tasks:
    # Step 1: run the containerized CSV transform (csv_transform.py).
    - operator: "KubernetesPodOperator"
      description: "Run CSV transform within kubernetes pod"
      args:
        task_id: "austin_waste_transform_csv"
        startup_timeout_seconds: 600
        name: "austin_waste"
        namespace: "default"
        image_pull_policy: "Always"
        image: "{{ var.json.austin_waste.container_registry.run_csv_transform_kub }}"
        env_vars:
          SOURCE_URL: "https://data.austintexas.gov/api/views/mbnu-4wq9/rows.csv"
          SOURCE_FILE: "files/data.csv"
          TARGET_FILE: "files/data_output.csv"
          TARGET_GCS_BUCKET: "{{ var.value.composer_bucket }}"
          TARGET_GCS_PATH: "data/austin_waste/waste_and_diversion/data_output.csv"
          # JSON strings: decoded by csv_transform.py via json.loads.
          CSV_HEADERS: >-
            [ "load_id", "report_date", "load_type", "load_time", "load_weight", "dropoff_site", "route_type", "route_number"]
          RENAME_MAPPINGS: >-
            {"Load ID": "load_id","Report Date": "report_date","Load Type": "load_type","Load Time": "load_time","Load Weight": "load_weight","Dropoff Site": "dropoff_site","Route Type": "route_type","Route Number": "route_number"}
        resources:
          limit_memory: "2G"
          limit_cpu: "1"

    # Step 2: load the staged CSV from the composer bucket into BigQuery.
    - operator: "GoogleCloudStorageToBigQueryOperator"
      description: "Task to load CSV data to a BigQuery table"
      args:
        task_id: "load_austin_waste_and_diversion_to_bq"
        bucket: "{{ var.value.composer_bucket }}"
        source_objects: ["data/austin_waste/waste_and_diversion/data_output.csv"]
        source_format: "CSV"
        destination_project_dataset_table: "austin_waste.waste_and_diversion"
        skip_leading_rows: 1
        write_disposition: "WRITE_TRUNCATE"

        schema_fields:
          - name: "load_id"
            type: "INTEGER"
            mode: "NULLABLE"
          - name: "report_date"
            type: "DATE"
            mode: "NULLABLE"
          - name: "load_type"
            type: "STRING"
            mode: "NULLABLE"
          - name: "load_time"
            type: "TIMESTAMP"
            mode: "NULLABLE"
          - name: "load_weight"
            type: "FLOAT"
            mode: "NULLABLE"
          - name: "dropoff_site"
            type: "STRING"
            mode: "NULLABLE"
          - name: "route_type"
            type: "STRING"
            mode: "NULLABLE"
          - name: "route_number"
            type: "STRING"
            mode: "NULLABLE"

  graph_paths:
    - "austin_waste_transform_csv >> load_austin_waste_and_diversion_to_bq"
# Copyright 2021 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

"""Airflow DAG: transform the Austin waste-and-diversion CSV, then load it to BigQuery."""

from airflow import DAG
from airflow.contrib.operators import gcs_to_bq, kubernetes_pod_operator

default_args = {
    "owner": "Google",
    "depends_on_past": False,
    "start_date": "2021-03-01",
}

# (column name, BigQuery type) pairs for the destination table; every column
# is loaded as NULLABLE.
_SCHEMA = [
    ("load_id", "INTEGER"),
    ("report_date", "DATE"),
    ("load_type", "STRING"),
    ("load_time", "TIMESTAMP"),
    ("load_weight", "FLOAT"),
    ("dropoff_site", "STRING"),
    ("route_type", "STRING"),
    ("route_number", "STRING"),
]

with DAG(
    dag_id="austin_waste.waste_and_diversion",
    default_args=default_args,
    max_active_runs=1,
    schedule_interval="@daily",
    catchup=False,
    default_view="graph",
) as dag:

    # Step 1: run the containerized CSV transform inside a Kubernetes pod.
    # CSV_HEADERS / RENAME_MAPPINGS are JSON strings decoded by the script.
    austin_waste_transform_csv = kubernetes_pod_operator.KubernetesPodOperator(
        task_id="austin_waste_transform_csv",
        startup_timeout_seconds=600,
        name="austin_waste",
        namespace="default",
        image_pull_policy="Always",
        image="{{ var.json.austin_waste.container_registry.run_csv_transform_kub }}",
        env_vars={
            "SOURCE_URL": "https://data.austintexas.gov/api/views/mbnu-4wq9/rows.csv",
            "SOURCE_FILE": "files/data.csv",
            "TARGET_FILE": "files/data_output.csv",
            "TARGET_GCS_BUCKET": "{{ var.value.composer_bucket }}",
            "TARGET_GCS_PATH": "data/austin_waste/waste_and_diversion/data_output.csv",
            "CSV_HEADERS": '[ "load_id", "report_date", "load_type", "load_time", "load_weight", "dropoff_site", "route_type", "route_number"]',
            "RENAME_MAPPINGS": '{"Load ID": "load_id","Report Date": "report_date","Load Type": "load_type","Load Time": "load_time","Load Weight": "load_weight","Dropoff Site": "dropoff_site","Route Type": "route_type","Route Number": "route_number"}',
        },
        resources={"limit_memory": "2G", "limit_cpu": "1"},
    )

    # Step 2: load the staged CSV from the composer bucket into BigQuery,
    # replacing the table contents on every run.
    load_austin_waste_and_diversion_to_bq = gcs_to_bq.GoogleCloudStorageToBigQueryOperator(
        task_id="load_austin_waste_and_diversion_to_bq",
        bucket="{{ var.value.composer_bucket }}",
        source_objects=["data/austin_waste/waste_and_diversion/data_output.csv"],
        source_format="CSV",
        destination_project_dataset_table="austin_waste.waste_and_diversion",
        skip_leading_rows=1,
        write_disposition="WRITE_TRUNCATE",
        schema_fields=[
            {"name": column, "type": bq_type, "mode": "NULLABLE"}
            for column, bq_type in _SCHEMA
        ],
    )

    austin_waste_transform_csv >> load_austin_waste_and_diversion_to_bq