# project_resstock_national.yml
# Top-level buildstockbatch project settings (schema v0.2)
# NOTE(review): unquoted 0.2 is parsed by YAML as the float 0.2 — confirm the
# buildstockbatch schema validator expects a number rather than a string here
schema_version: 0.2
stock_type: residential
buildstock_directory: ../resstock # Relative to this file or absolute
project_directory: project_national # Relative to buildstock_directory
output_directory: ../national_test_outputs
# Weather data: either downloaded from this URL, or supplied locally via weather_files_path below
weather_files_url: https://data.nrel.gov/system/files/128/ResStock_TMY3.zip
# weather_files_path: ../ResStock_TMY3.zip # Relative to this file or absolute path to zipped weather files
# Baseline housing-stock simulation settings
baseline:
  n_datapoints: 4  # Number of building samples to simulate
  n_buildings_represented: 133172057 # Total number of residential dwelling units in contiguous United States, including unoccupied units, resulting from a census tract level query of ACS 5-yr 2016 (i.e. 2012-2016), using this script: https://github.com/NREL/resstock-estimation/blob/master/sources/spatial/tsv_maker.py.
  sampling_algorithm: quota # The default resstock sampling algorithm - use precomputed if using the precomputed_sample option
# Upgrade scenarios applied on top of the baseline stock
upgrades:
  - upgrade_name: Triple-Pane Windows
    options:
      # Option name references a parameter|option pair from the ResStock housing characteristics
      - option: Windows|Low-E, Triple, Non-metal, Air, L-Gain
        # apply_logic: # Uncomment to restrict which buildings receive this option
        costs:
          - value: 45.77  # Cost per unit of the multiplier below — presumably USD/ft^2; verify against project docs
            multiplier: Window Area (ft^2)
        lifetime: 30  # Expected measure lifetime — presumably years; verify against project docs
# Export simulation timeseries results to CSV
timeseries_csv_export:
  reporting_frequency: Hourly
  include_enduse_subcategories: true
# downselect: # Uncomment and specify the logic if you want to downselect to a subset of the building stock
# resample: true
# logic:
# - Geometry Building Type RECS|Single-Family Detached
# - Vacancy Status|Occupied
# Settings for running on NREL's Eagle HPC system
eagle:
  n_jobs: 4  # Number of parallel HPC jobs to split the simulations across
  minutes_per_sim: 30  # Walltime allowance per individual simulation
  account: enduse  # HPC allocation account to charge
  sampling:
    time: 20  # Walltime for the sampling step — presumably minutes; verify against buildstockbatch docs
  postprocessing:
    time: 60  # Walltime for postprocessing — presumably minutes; verify against buildstockbatch docs
    n_workers: 3  # Number of postprocessing workers
# aws:
# # The job_identifier should be unique, start with alpha, not include dashes, and limited to 10 chars or data loss can occur
# job_identifier: test_proj
# s3:
# bucket: resbldg-datasets
# prefix: testing/user_test
# emr:
# slave_instance_count: 1
# region: us-west-2
# use_spot: true
# batch_array_size: 10
# # To receive email updates on job progress accept the request to receive emails that will be sent from Amazon
# notifications_email: user@nrel.gov
# postprocessing:
# aggregate_timeseries: true
# aws:
# region_name: 'us-west-2'
# s3:
# bucket: resbldg-datasets
# prefix: resstock-athena/calibration_runs_new
# athena:
# glue_service_role: service-role/AWSGlueServiceRole-default
# database_name: testing
# max_crawling_time: 300 #time to wait for the crawler to complete before aborting it