Merge pull request #326 from NREL/integration-tests

Add Integration Tests

nmerket committed Nov 10, 2022
2 parents 8bb4e79 + 563b9e7, commit 2757065
Showing 4 changed files with 130 additions and 19 deletions.
58 changes: 40 additions & 18 deletions .github/workflows/ci.yml
@@ -1,60 +1,82 @@
 name: BuildStockBatch Tests
-on: [pull_request]
+on:
+  push:
+    branches:
+      - develop
+  pull_request:
+    types:
+      - synchronize
+      - opened
 jobs:
-  build:
+  tests:
     runs-on: ubuntu-latest
     strategy:
       fail-fast: false
       matrix:
         python-version: ['3.8', '3.9', '3.10']
     name: Tests - Python ${{ matrix.python-version }}
     steps:
-      - uses: actions/checkout@v2
-      - uses: actions/setup-python@v2
+      - uses: actions/checkout@v3
+        with:
+          path: buildstockbatch
+      - uses: actions/checkout@v3
+        with:
+          repository: NREL/resstock
+          path: resstock
+          ref: develop
+      - uses: actions/setup-python@v4
         with:
           python-version: ${{ matrix.python-version }}
+      - name: Download weather
+        run: |
+          mkdir weather
+          cd weather
+          wget --quiet https://data.nrel.gov/system/files/156/BuildStock_TMY3_FIPS.zip
       - name: Install buildstockbatch
         run: |
+          cd buildstockbatch
           python -m pip install --progress-bar off --upgrade pip
           pip install .[dev] --progress-bar off
       - name: Linting
         run: |
+          cd buildstockbatch
           # stop the build if there are Python syntax errors or undefined names
           flake8 buildstockbatch --count --select=E9,F63,F7,F82 --show-source --statistics
           # exit-zero treats all errors as warnings. The GitHub editor is 127 chars wide
           flake8 buildstockbatch --count --max-line-length=127 --statistics
       - name: Run PyTest and Coverage
         run: |
+          cd buildstockbatch
           pytest --junitxml=coverage/junit.xml --cov=buildstockbatch --cov-report=xml:coverage/coverage.xml --cov-report=html:coverage/htmlreport
       - name: Test Report
-        uses: mikepenz/action-junit-report@v2.9.1
-        if: ${{ matrix.python-version == '3.9' }}
+        uses: mikepenz/action-junit-report@v3.5.2
+        if: ${{ matrix.python-version == '3.10' }}
         with:
-          report_paths: coverage/junit.xml
+          report_paths: buildstockbatch/coverage/junit.xml
           check_name: Testing Report
           fail_on_failure: true
       - name: Save Coverage Report
-        uses: actions/upload-artifact@v2
-        if: ${{ matrix.python-version == '3.9' }}
+        uses: actions/upload-artifact@v3
+        if: ${{ matrix.python-version == '3.10' }}
         with:
           name: coverage-report-html
-          path: coverage/htmlreport/
+          path: buildstockbatch/coverage/htmlreport/
       - name: Report coverage to PR
-        uses: 5monkeys/cobertura-action@v12
-        if: ${{ matrix.python-version == '3.9' }}
+        uses: 5monkeys/cobertura-action@v13
+        if: ${{ matrix.python-version == '3.10' }}
         with:
-          path: coverage/coverage.xml
+          path: buildstockbatch/coverage/coverage.xml
           repo_token: ${{ secrets.GITHUB_TOKEN }}
-          minimum_coverage: 24
+          minimum_coverage: 33
           fail_below_threshold: true
       - name: Build documentation
-        if: ${{ matrix.python-version == '3.9' }}
+        if: ${{ matrix.python-version == '3.10' }}
         run: |
-          cd docs
+          cd buildstockbatch/docs
           make html SPHINXOPTS="-W --keep-going -n"
       - name: Save Docs
-        uses: actions/upload-artifact@v2
-        if: ${{ matrix.python-version == '3.9' }}
+        uses: actions/upload-artifact@v3
+        if: ${{ matrix.python-version == '3.10' }}
         with:
          name: documentation
          path: docs/_build/html/
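The reworked workflow checks out buildstockbatch and ResStock side by side and downloads weather files next to them, which is exactly the layout the new integration test expects. A minimal sketch (illustrative paths, not part of the PR) of how the workspace lines up with the default ResStock lookup in test_integration.py below:

# Illustrative sketch (not part of the PR): how the CI workspace layout
# maps onto the default ResStock lookup in test_integration.py.
import pathlib

workspace = pathlib.Path("/home/runner/work/ws")  # hypothetical $GITHUB_WORKSPACE
bsb_repo = workspace / "buildstockbatch"          # first actions/checkout step
resstock = workspace / "resstock"                 # second actions/checkout step
weather = workspace / "weather"                   # created by the Download weather step

# The test module lives at buildstockbatch/buildstockbatch/test/test_integration.py,
# so four .parent hops from it land back at the workspace root:
test_file = bsb_repo / "buildstockbatch" / "test" / "test_integration.py"
assert test_file.parent.parent.parent.parent / "resstock" == resstock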
2 changes: 1 addition & 1 deletion buildstockbatch/postprocessing.py
@@ -504,7 +504,7 @@ def combine_results(fs, results_dir, cfg, do_timeseries=True):
     logger.info(f'Writing {csv_filename}')
     with fs.open(csv_filename, 'wb') as f:
         with gzip.open(f, 'wt', encoding='utf-8') as gf:
-            df.to_csv(gf, index=True, line_terminator='\n')
+            df.to_csv(gf, index=True, lineterminator='\n')
 
     # Write Parquet
     if upgrade_id == 0:
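The one-keyword change above tracks pandas 1.5, which renamed the line_terminator argument of DataFrame.to_csv to lineterminator (the old spelling was later removed in pandas 2.0). A hedged compatibility sketch, assuming you needed to support both spellings; the PR itself simply adopts the new name:

import gzip

import pandas as pd
from packaging.version import Version


def write_gzipped_csv(df: pd.DataFrame, csv_filename: str) -> None:
    # Pick the keyword spelling the installed pandas understands:
    # pandas >= 1.5 accepts lineterminator, older releases only accept
    # line_terminator.  This guard is illustrative, not what the PR does.
    key = "lineterminator" if Version(pd.__version__) >= Version("1.5.0") else "line_terminator"
    with gzip.open(csv_filename, "wt", encoding="utf-8") as gf:
        df.to_csv(gf, index=True, **{key: "\n"})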
83 changes: 83 additions & 0 deletions buildstockbatch/test/test_integration.py
@@ -0,0 +1,83 @@
import os
import pandas as pd
import pathlib
import pytest
import shutil

from buildstockbatch.localdocker import LocalDockerBatch

resstock_directory = pathlib.Path(
    os.environ.get("RESSTOCK_DIR", pathlib.Path(__file__).resolve().parent.parent.parent.parent / "resstock")
)
resstock_required = pytest.mark.skipif(
    not resstock_directory.exists(),
    reason="ResStock checkout is not found"
)


@pytest.mark.parametrize("project_filename", [
    resstock_directory / "project_national" / "national_baseline.yml",
    resstock_directory / "project_national" / "national_upgrades.yml",
    resstock_directory / "project_testing" / "testing_baseline.yml",
    resstock_directory / "project_testing" / "testing_upgrades.yml",
], ids=lambda x: x.stem)
@resstock_required
def test_resstock_local_batch(project_filename, mocker):
    LocalDockerBatch.validate_project(str(project_filename))
    batch = LocalDockerBatch(str(project_filename))

    # Get the number of upgrades
    n_upgrades = len(batch.cfg.get("upgrades", []))
    # Limit the number of upgrades to 2 to reduce simulation time
    if n_upgrades > 2:
        batch.cfg["upgrades"] = batch.cfg["upgrades"][0:2]
        n_upgrades = 2

    # Modify the number of datapoints so we're not here all day.
    if n_upgrades == 0:
        n_datapoints = 4
    else:
        n_datapoints = 2
    batch.cfg["sampler"]["args"]["n_datapoints"] = n_datapoints

    local_weather_file = resstock_directory.parent / "weather" / batch.cfg["weather_files_url"].split("/")[-1]
    if local_weather_file.exists():
        del batch.cfg["weather_files_url"]
        batch.cfg["weather_files_path"] = str(local_weather_file)

    batch.run_batch()

    # Make sure all the files are there
    out_path = pathlib.Path(batch.output_dir)
    simout_path = out_path / "simulation_output"
    assert (simout_path / "results_job0.json.gz").exists()
    assert (simout_path / "simulations_job0.tar.gz").exists()

    for upgrade_id in range(0, n_upgrades + 1):
        for bldg_id in range(1, n_datapoints + 1):
            assert (simout_path / "timeseries" / f"up{upgrade_id:02d}" / f"bldg{bldg_id:07d}.parquet").exists()

    batch.process_results()

    assert not (simout_path / "timeseries").exists()
    assert not (simout_path / "results_job0.json.gz").exists()
    assert (simout_path / "simulations_job0.tar.gz").exists()
    base_pq = out_path / "parquet" / "baseline" / "results_up00.parquet"
    assert base_pq.exists()
    base = pd.read_parquet(base_pq, columns=["completed_status"])
    assert (base["completed_status"] == "Success").all()
    assert base.shape[0] == n_datapoints
    ts_pq_path = out_path / "parquet" / "timeseries"
    for upgrade_id in range(0, n_upgrades + 1):
        assert (ts_pq_path / f"upgrade={upgrade_id}" / "group0.parquet").exists()
        assert (out_path / "results_csvs" / f"results_up{upgrade_id:02d}.csv.gz").exists()
        if upgrade_id >= 1:
            upg_pq = out_path / "parquet" / "upgrades" / f"upgrade={upgrade_id}" / f"results_up{upgrade_id:02d}.parquet"
            assert upg_pq.exists()
            upg = pd.read_parquet(upg_pq, columns=["completed_status"])
            assert (upg["completed_status"] == "Success").all()
            assert upg.shape[0] == n_datapoints
    assert (ts_pq_path / "_common_metadata").exists()
    assert (ts_pq_path / "_metadata").exists()

    shutil.rmtree(out_path)
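Running these integration tests outside CI requires a running Docker daemon (the test drives LocalDockerBatch) and a local ResStock checkout. A sketch of a local invocation, with an illustrative ResStock path; if RESSTOCK_DIR is unset, the tests fall back to a sibling resstock/ directory and otherwise skip:

# Hedged local-run sketch (paths are illustrative, not from the PR).
import os
import sys

import pytest

# Point the tests at a ResStock checkout; omit this to rely on the
# sibling-directory default, in which case a missing checkout just skips.
os.environ.setdefault("RESSTOCK_DIR", "/path/to/resstock")

sys.exit(pytest.main(["buildstockbatch/test/test_integration.py", "-v"]))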
6 changes: 6 additions & 0 deletions docs/changelog/changelog_dev.rst
@@ -20,3 +20,9 @@ Development Changelog
         :pullreq: 323
 
         Updates and simplifies python dependencies.
+
+    .. change::
+        :tags: general
+        :pullreq: 326
+
+        Adds some integration tests with the latest ResStock.
