Commit

chore: Regenerate DAG files for new_york dataset (#210)
adlersantos committed Oct 14, 2021
1 parent 697f7be commit c9b8b7a
Showing 3 changed files with 12 additions and 9 deletions.
7 changes: 4 additions & 3 deletions datasets/new_york/311_service_requests/311_service_requests_dag.py

@@ -14,7 +14,8 @@
 
 
 from airflow import DAG
-from airflow.contrib.operators import gcs_to_bq, kubernetes_pod_operator
+from airflow.providers.cncf.kubernetes.operators import kubernetes_pod
+from airflow.providers.google.cloud.transfers import gcs_to_bigquery
 
 default_args = {
     "owner": "Google",
@@ -33,7 +34,7 @@
 ) as dag:
 
     # Run CSV transform within kubernetes pod
-    transform_csv = kubernetes_pod_operator.KubernetesPodOperator(
+    transform_csv = kubernetes_pod.KubernetesPodOperator(
         task_id="transform_csv",
         name="311_service_requests",
         namespace="default",
@@ -51,7 +52,7 @@
     )
 
     # Task to load CSV data to a BigQuery table
-    load_to_bq = gcs_to_bq.GoogleCloudStorageToBigQueryOperator(
+    load_to_bq = gcs_to_bigquery.GCSToBigQueryOperator(
         task_id="load_to_bq",
         bucket="{{ var.value.composer_bucket }}",
         source_objects=["data/new_york/311_service_requests/data_output.csv"],
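The same two-line substitution repeats in every regenerated DAG: the deprecated airflow.contrib.operators imports are replaced by their Airflow 2 provider-package equivalents, and the operator classes move with them (KubernetesPodOperator keeps its name but changes modules; GoogleCloudStorageToBigQueryOperator becomes GCSToBigQueryOperator). Below is a minimal sketch of a post-migration DAG in that shape; the start date, schedule, container image, and destination table are placeholder values for illustration, not taken from this commit.

# Minimal sketch of a DAG wired with the post-migration provider imports.
# start_date, schedule_interval, image, and the destination table are
# placeholders, not values from this repository.
from datetime import datetime

from airflow import DAG
from airflow.providers.cncf.kubernetes.operators import kubernetes_pod
from airflow.providers.google.cloud.transfers import gcs_to_bigquery

default_args = {
    "owner": "Google",
    "depends_on_past": False,
    "start_date": datetime(2021, 3, 1),  # placeholder start date
}

with DAG(
    dag_id="new_york.311_service_requests",
    default_args=default_args,
    schedule_interval="@daily",  # placeholder schedule
    catchup=False,
) as dag:

    # Run the CSV transform inside a Kubernetes pod
    # (module moved to the cncf.kubernetes provider; class name unchanged).
    transform_csv = kubernetes_pod.KubernetesPodOperator(
        task_id="transform_csv",
        name="311_service_requests",
        namespace="default",
        image="gcr.io/example-project/transform-csv:latest",  # placeholder image
    )

    # Load the transformed CSV into BigQuery
    # (class renamed from GoogleCloudStorageToBigQueryOperator).
    load_to_bq = gcs_to_bigquery.GCSToBigQueryOperator(
        task_id="load_to_bq",
        bucket="{{ var.value.composer_bucket }}",
        source_objects=["data/new_york/311_service_requests/data_output.csv"],
        destination_project_dataset_table="new_york.311_service_requests",  # placeholder table
        write_disposition="WRITE_TRUNCATE",
    )

    transform_csv >> load_to_bq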
7 changes: 4 additions & 3 deletions datasets/new_york/citibike_stations/citibike_stations_dag.py
@@ -14,7 +14,8 @@
 
 
 from airflow import DAG
-from airflow.contrib.operators import gcs_to_bq, kubernetes_pod_operator
+from airflow.providers.cncf.kubernetes.operators import kubernetes_pod
+from airflow.providers.google.cloud.transfers import gcs_to_bigquery
 
 default_args = {
     "owner": "Google",
@@ -33,7 +34,7 @@
 ) as dag:
 
     # Run CSV transform within kubernetes pod
-    transform_csv = kubernetes_pod_operator.KubernetesPodOperator(
+    transform_csv = kubernetes_pod.KubernetesPodOperator(
         task_id="transform_csv",
         name="citibike_stations",
         namespace="default",
@@ -52,7 +53,7 @@
     )
 
     # Task to load CSV data to a BigQuery table
-    load_to_bq = gcs_to_bq.GoogleCloudStorageToBigQueryOperator(
+    load_to_bq = gcs_to_bigquery.GCSToBigQueryOperator(
         task_id="load_to_bq",
         bucket="{{ var.value.composer_bucket }}",
         source_objects=["data/new_york/citibike_stations/data_output.csv"],
7 changes: 4 additions & 3 deletions datasets/new_york/tree_census_1995/tree_census_1995_dag.py
@@ -14,7 +14,8 @@
 
 
 from airflow import DAG
-from airflow.contrib.operators import gcs_to_bq, kubernetes_pod_operator
+from airflow.providers.cncf.kubernetes.operators import kubernetes_pod
+from airflow.providers.google.cloud.transfers import gcs_to_bigquery
 
 default_args = {
     "owner": "Google",
@@ -33,7 +34,7 @@
 ) as dag:
 
     # Run CSV transform within kubernetes pod
-    transform_csv = kubernetes_pod_operator.KubernetesPodOperator(
+    transform_csv = kubernetes_pod.KubernetesPodOperator(
         task_id="transform_csv",
         name="tree_census_1995",
         namespace="default",
@@ -51,7 +52,7 @@
     )
 
     # Task to load CSV data to a BigQuery table
-    load_to_bq = gcs_to_bq.GoogleCloudStorageToBigQueryOperator(
+    load_to_bq = gcs_to_bigquery.GCSToBigQueryOperator(
         task_id="load_to_bq",
         bucket="{{ var.value.composer_bucket }}",
         source_objects=["data/new_york/tree_census_1995/data_output.csv"],
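One operational note, stated as an assumption about the deployment rather than something this commit asserts: the new import paths only resolve when the corresponding provider distributions are installed alongside Airflow 2, e.g.

pip install apache-airflow-providers-cncf-kubernetes apache-airflow-providers-google

Cloud Composer images typically ship both providers preinstalled, so the regenerated DAGs should import cleanly there without extra dependencies.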
