Skip to content

Commit

Permalink
Merge pull request #187 from NREL/sampler_workflow_refactor
Browse files Browse the repository at this point in the history
Sampler and Workflow Refactor
  • Loading branch information
nmerket committed Feb 10, 2021
2 parents 288a06c + c671702 commit 86067e7
Show file tree
Hide file tree
Showing 58 changed files with 1,785 additions and 1,271 deletions.
33 changes: 18 additions & 15 deletions aws_demo_project.yml
@@ -1,33 +1,36 @@
schema_version: 0.2
stock_type: residential
schema_version: '0.3'
buildstock_directory: ../resstock # Relative to this file or absolute
project_directory: project_national # Relative to buildstock_directory
output_directory: ../demo_test_outputs
weather_files_url: https://data.nrel.gov/system/files/128/ResStock_TMY3.zip
weather_files_url: https://data.nrel.gov/system/files/156/BuildStock_TMY3_FIPS.zip

baseline:
n_datapoints: 4 # Comment this line out if using a custom buildstock csv file
n_buildings_represented: 133172057 # Total number of residential dwelling units in contiguous United States, including unoccupied units, resulting from a census tract level query of ACS 5-yr 2016 (i.e. 2012-2016), using this script: https://github.com/NREL/resstock-estimation/blob/master/sources/spatial/tsv_maker.py.
sampling_algorithm: quota # The default resstock sampling algorithm - use precomputed if using the precomputed_sample option

sampler:
type: residential_quota # change to residential_quota_downselect to do downselect
args:
n_datapoints: 4
# logic:
# - Geometry Building Type RECS|Single-Family Detached
# - Vacancy Status|Occupied
#    resample: false # Uncomment and specify the logic if you want to downselect to a subset of the building stock

workflow_generator:
type: residential_default
args:
timeseries_csv_export:
reporting_frequency: Hourly
include_enduse_subcategories: true

upgrades:
- upgrade_name: Triple-Pane Windows
options:
- option: Windows|Low-E, Triple, Non-metal, Air, L-Gain
# apply_logic:
costs:
- value: 45.77
multiplier: Window Area (ft^2)
lifetime: 30
timeseries_csv_export:
reporting_frequency: Hourly
include_enduse_subcategories: true

# downselect: # Uncomment and specify the logic if you want to downselect to a subset of the building stock
# resample: true
# logic:
# - Geometry Building Type RECS|Single-Family Detached
# - Vacancy Status|Occupied

aws:
# The job_identifier must be unique, start with alpha, not include dashes, and limited to 10 chars
Expand Down
9 changes: 6 additions & 3 deletions buildstockbatch/__version__.py
@@ -1,9 +1,12 @@
import datetime as dt


__title__ = 'buildstockbatch'
__description__ = 'Executing BuildStock projects on batch infrastructure.'
__url__ = 'http://github.com/NREL/buildstockbatch'
__version__ = '0.19'
__schema_version__ = '0.2'
__version__ = '0.20'
__schema_version__ = '0.3'
__author__ = 'Noel Merket'
__author_email__ = 'noel.merket@nrel.gov'
__license__ = 'BSD-3'
__copyright__ = 'Copyright 2020 The Alliance for Sustainable Energy'
__copyright__ = 'Copyright {} The Alliance for Sustainable Energy'.format(dt.date.today().year)
12 changes: 5 additions & 7 deletions buildstockbatch/aws/aws.py
Expand Up @@ -40,7 +40,7 @@
from buildstockbatch.base import ValidationError
from buildstockbatch.aws.awsbase import AwsJobBase
from buildstockbatch import postprocessing
from ..utils import log_error_details
from ..utils import log_error_details, get_project_configuration

logger = logging.getLogger(__name__)

Expand Down Expand Up @@ -1661,7 +1661,7 @@ def __init__(self, project_filename):

@staticmethod
def validate_instance_types(project_file):
cfg = AwsBatch.get_project_configuration(project_file)
cfg = get_project_configuration(project_file)
aws_config = cfg['aws']
boto3_session = boto3.Session(region_name=aws_config['region'])
ec2 = boto3_session.client('ec2')
Expand Down Expand Up @@ -1772,10 +1772,7 @@ def run_batch(self):
"""

# Generate buildstock.csv
if 'downselect' in self.cfg:
buildstock_csv_filename = self.downselect()
else:
buildstock_csv_filename = self.run_sampling()
buildstock_csv_filename = self.sampler.run_sampling()

# Compress and upload assets to S3
with tempfile.TemporaryDirectory() as tmpdir, tempfile.TemporaryDirectory() as tmp_weather_dir:
Expand Down Expand Up @@ -1837,6 +1834,7 @@ def run_batch(self):
with open(job_json_filename, 'w') as f:
json.dump({
'job_num': i,
'n_datapoints': n_datapoints,
'batch': batch,
}, f, indent=4)
array_size = i
Expand Down Expand Up @@ -1981,7 +1979,7 @@ def run_job(cls, job_id, bucket, prefix, job_name, region):
sim_id = f'bldg{building_id:07d}up{upgrade_id:02d}'

# Create OSW
osw = cls.create_osw(cfg, sim_id, building_id, upgrade_idx)
osw = cls.create_osw(cfg, jobs_d['n_datapoints'], sim_id, building_id, upgrade_idx)
with open(os.path.join(sim_dir, 'in.osw'), 'w') as f:
json.dump(osw, f, indent=4)

Expand Down

0 comments on commit 86067e7

Please sign in to comment.