Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

feat: Onboard Google CFE dataset #146

Merged
merged 5 commits into from Aug 27, 2021
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Jump to
Jump to file
Failed to load files.
Diff view
Diff view
39 changes: 39 additions & 0 deletions datasets/google_cfe/_terraform/datacenter_cfe_pipeline.tf
@@ -0,0 +1,39 @@
/**
* Copyright 2021 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/


# BigQuery table holding the yearly carbon-free-energy (CFE) scores
# loaded by the datacenter_cfe pipeline.
resource "google_bigquery_table" "datacenter_cfe" {
  project     = var.project_id
  dataset_id  = "google_cfe"
  table_id    = "datacenter_cfe"
  description = "Carbon-free energy (CFE) scores for Google Cloud regions and other Google data center regions"

  # The dataset must exist before the table can be created.
  depends_on = [google_bigquery_dataset.google_cfe]
}

# Short table ID (e.g. "datacenter_cfe"), for downstream references.
output "bigquery_table-datacenter_cfe-table_id" {
value = google_bigquery_table.datacenter_cfe.table_id
}

# Fully-qualified Terraform resource ID of the table.
output "bigquery_table-datacenter_cfe-id" {
value = google_bigquery_table.datacenter_cfe.id
}
35 changes: 35 additions & 0 deletions datasets/google_cfe/_terraform/google_cfe_dataset.tf
@@ -0,0 +1,35 @@
/**
* Copyright 2021 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/


# BigQuery dataset that groups all Google CFE tables.
resource "google_bigquery_dataset" "google_cfe" {
dataset_id = "google_cfe"
project = var.project_id
description = "Carbon-free energy (CFE) scores for Google Cloud regions and other Google data center regions"
}

# Dataset ID, exported for downstream references.
output "bigquery_dataset-google_cfe-dataset_id" {
value = google_bigquery_dataset.google_cfe.dataset_id
}

# GCS bucket for this pipeline's data files.
# force_destroy lets `terraform destroy` remove the bucket even when it
# still contains objects.
resource "google_storage_bucket" "datacenter-cfe" {
name = "${var.bucket_name_prefix}-datacenter-cfe"
force_destroy = true
}

# Bucket name, exported for downstream references.
output "storage_bucket-datacenter-cfe-name" {
value = google_storage_bucket.datacenter-cfe.name
}
28 changes: 28 additions & 0 deletions datasets/google_cfe/_terraform/provider.tf
@@ -0,0 +1,28 @@
/**
* Copyright 2021 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/


# Google Cloud provider, authenticating via service-account impersonation
# rather than local user credentials.
provider "google" {
project = var.project_id
impersonate_service_account = var.impersonating_acct
region = var.region
}

# Identity of the caller performing the impersonation (for visibility).
data "google_client_openid_userinfo" "me" {}

# Email of the account running Terraform, surfaced after apply.
output "impersonating-account" {
value = data.google_client_openid_userinfo.me.email
}
23 changes: 23 additions & 0 deletions datasets/google_cfe/_terraform/variables.tf
@@ -0,0 +1,23 @@
/**
* Copyright 2021 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/


# GCP project that hosts the dataset, table, and bucket.
variable "project_id" {}
# Prefix used to build globally-unique GCS bucket names.
variable "bucket_name_prefix" {}
# Service account email that the google provider impersonates.
variable "impersonating_acct" {}
# Default GCP region for the provider.
variable "region" {}
# Deployment environment label; not referenced in the files shown here —
# NOTE(review): presumably consumed by the dataset-generation tooling; confirm.
variable "env" {}

86 changes: 86 additions & 0 deletions datasets/google_cfe/datacenter_cfe/datacenter_cfe_dag.py
@@ -0,0 +1,86 @@
# Copyright 2021 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.


from airflow import DAG
from airflow.providers.google.cloud.transfers import gcs_to_bigquery

# Default task arguments shared by every task in this DAG.
default_args = dict(
    owner="Google",
    depends_on_past=False,
    start_date="2021-08-23",
)


with DAG(
    dag_id="google_cfe.datacenter_cfe",
    default_args=default_args,
    max_active_runs=1,
    # "@once": the pipeline is triggered manually after new yearly data
    # is released, rather than on a recurring schedule.
    schedule_interval="@once",
    catchup=False,
    default_view="graph",
) as dag:

    # Task to load CSV data to a BigQuery table
    cfe_gcs_to_bq = gcs_to_bigquery.GCSToBigQueryOperator(
        task_id="cfe_gcs_to_bq",
        # Source GCS bucket, read from the Airflow Variable `google_cfe`.
        bucket="{{ var.json.google_cfe.source_bucket }}",
        # CSV covering 2019 up to the previous year; the filename's end year
        # is derived by stepping the execution date back 366 days.
        source_objects=[
            "data/2019-to-{{ macros.ds_format(macros.ds_add(ds, -366), '%Y-%m-%d', '%Y') }}.csv"
        ],
        source_format="CSV",
        destination_project_dataset_table="google_cfe.datacenter_cfe",
        # The CSV file contains a header row.
        skip_leading_rows=1,
        # Overwrite the table on each load so re-runs stay idempotent.
        write_disposition="WRITE_TRUNCATE",
        schema_fields=[
            {
                "name": "year",
                "type": "INTEGER",
                "mode": "REQUIRED",
                "description": "The year for which the Google CFE metric has been aggregated. We will continue to add new data for each year as we make progress. Note that the Google CFE metric will only be included for the years that the Cloud Region or Data Center was operational. For example, the Data Center in Denmark came online in 2020, so the Google CFE data for that region starts in 2020 (and there is no data for Denmark for 2019).",
            },
            {
                "name": "cfe_region",
                "type": "STRING",
                "mode": "REQUIRED",
                "description": "The regional boundary of the electric grid we consider when calculating the Google CFE score. For most of the world, the CFE region is defined as the country. However, for countries that have distinct grids, such as the US, there are multiple CFE regions, which are defined by the balancing authority.",
            },
            {
                "name": "zone_id",
                "type": "STRING",
                "mode": "REQUIRED",
                "description": "This is the ID associated with the CFE Region based on Tomorrow's ElectricityMap API definition. (http://static.electricitymap.org/api/docs/index.html#zones)",
            },
            {
                "name": "cloud_region",
                "type": "STRING",
                "mode": "REQUIRED",
                "description": "The Google Cloud Region that is mapped to the CFE region. For Google Data Centers that are not Cloud Regions, the region will be labeled 'non-cloud-data-center'.",
            },
            {
                "name": "location",
                "type": "STRING",
                "mode": "REQUIRED",
                "description": 'This is the "friendly name" of the Cloud Region. For Google Data Centers that are not Cloud regions, the location will be the country (non-US) or the state (US) that the Data Center is located in.',
            },
            {
                "name": "google_cfe",
                "type": "FLOAT",
                "mode": "NULLABLE",
                "description": "This metric is calculated for every hour in every region and tells us what percentage of the energy we consumed during an hour that is carbon-free. We take into account the carbon-free energy that's already supplied by the grid, in addition to the investments we have made in renewable energy in that location to reach our 24/7 carbon-free objective (https://www.gstatic.com/gumdrop/sustainability/247-carbon-free-energy.pdf). We then aggregate the available average hourly CFE percentage for each region for the year. We do not currently have the hourly energy information available for calculating the metrics for all regions. We anticipate rolling out the calculated metrics using hourly data to regions as the data becomes available.",
            },
        ],
    )

    # Declare the (single-task) DAG graph.
    cfe_gcs_to_bq
103 changes: 103 additions & 0 deletions datasets/google_cfe/datacenter_cfe/pipeline.yaml
@@ -0,0 +1,103 @@
# Copyright 2021 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.


---
resources:
# A list of GCP resources that are unique and specific to your pipeline.
#
# The currently supported resources are shown below. Use only the resources
# needed by your pipeline, and delete the rest of the examples.
#
# We will keep adding to the list below to support more Google Cloud resources
# over time. If a resource you need isn't supported, please file an issue on
# the repository.
- type: bigquery_table
# A Google BigQuery table to store your data. Requires a `bigquery_dataset`
    # to be specified in the config (i.e. `dataset.yaml`) for the dataset that
    # this pipeline belongs in.
    table_id: datacenter_cfe
description: "Carbon-free energy (CFE) scores for Google Cloud regions and other Google data center regions"

dag:
# [Required] Specify the Airflow version of the operators used by the DAG.
airflow_version: 2

initialize:
dag_id: datacenter_cfe
default_args:
owner: "Google"
depends_on_past: False
start_date: '2021-08-23'
max_active_runs: 1
schedule_interval: "@once" # The pipeline will be run manually once a year after new data is released
catchup: False
default_view: graph

tasks:
- operator: "GoogleCloudStorageToBigQueryOperator"
# Initializes GCS to BQ task for the DAG. This operator is used to load a
# JSON, CSV, Avro, ORC, or Parquet data from GCS into a BigQuery table.

# Task description
description: "Task to load CSV data to a BigQuery table"

# Arguments supported by this operator:
# http://airflow.apache.org/docs/apache-airflow/stable/howto/operator/gcp/gcs.html#googlecloudstoragetobigqueryoperator
args:
task_id: "cfe_gcs_to_bq"

# The GCS bucket where the CSV file is located in.
bucket: "{{ var.json.google_cfe.source_bucket }}"

# Use the CSV file containing data from 2019 up to the previous year
source_objects: ["data/2019-to-{{ macros.ds_format(macros.ds_add(ds, -366), '%Y-%m-%d', '%Y') }}.csv"]
source_format: "CSV"
destination_project_dataset_table: "google_cfe.datacenter_cfe"

# Use this if your CSV file contains a header row
skip_leading_rows: 1

# How to write data to the table: overwrite, append, or write if empty
# See https://cloud.google.com/bigquery/docs/reference/auditlogs/rest/Shared.Types/WriteDisposition
write_disposition: "WRITE_TRUNCATE"

schema_fields:
- name: "year"
type: "INTEGER"
mode: "REQUIRED"
description: "The year for which the Google CFE metric has been aggregated. We will continue to add new data for each year as we make progress. Note that the Google CFE metric will only be included for the years that the Cloud Region or Data Center was operational. For example, the Data Center in Denmark came online in 2020, so the Google CFE data for that region starts in 2020 (and there is no data for Denmark for 2019)."
- name: "cfe_region"
type: "STRING"
mode: "REQUIRED"
description: "The regional boundary of the electric grid we consider when calculating the Google CFE score. For most of the world, the CFE region is defined as the country. However, for countries that have distinct grids, such as the US, there are multiple CFE regions, which are defined by the balancing authority."
- name: "zone_id"
type: "STRING"
mode: "REQUIRED"
description: "This is the ID associated with the CFE Region based on Tomorrow's ElectricityMap API definition. (http://static.electricitymap.org/api/docs/index.html#zones)"
- name: "cloud_region"
type: "STRING"
mode: "REQUIRED"
description: "The Google Cloud Region that is mapped to the CFE region. For Google Data Centers that are not Cloud Regions, the region will be labeled 'non-cloud-data-center'."
- name: "location"
type: "STRING"
mode: "REQUIRED"
description: "This is the \"friendly name\" of the Cloud Region. For Google Data Centers that are not Cloud regions, the location will be the country (non-US) or the state (US) that the Data Center is located in."
- name: "google_cfe"
type: "FLOAT"
mode: "NULLABLE"
description: "This metric is calculated for every hour in every region and tells us what percentage of the energy we consumed during an hour that is carbon-free. We take into account the carbon-free energy that's already supplied by the grid, in addition to the investments we have made in renewable energy in that location to reach our 24/7 carbon-free objective (https://www.gstatic.com/gumdrop/sustainability/247-carbon-free-energy.pdf). We then aggregate the available average hourly CFE percentage for each region for the year. We do not currently have the hourly energy information available for calculating the metrics for all regions. We anticipate rolling out the calculated metrics using hourly data to regions as the data becomes available."

graph_paths:
- "cfe_gcs_to_bq"