Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Fix scheduling with efficiencies #520

Merged
merged 6 commits into from Nov 2, 2022
Merged
Show file tree
Hide file tree
Changes from 3 commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Jump to
Jump to file
Failed to load files.
Diff view
Diff view
4 changes: 3 additions & 1 deletion flexmeasures/api/v1_3/tests/test_api_v1_3.py
Expand Up @@ -112,7 +112,9 @@ def test_post_udi_event_and_get_device_message(
# check targets, if applicable
if "targets" in message:
start_soc = message["value"] / 1000 # in MWh
soc_schedule = integrate_time_series(consumption_schedule, start_soc, 6)
soc_schedule = integrate_time_series(
consumption_schedule, start_soc, decimal_precision=6
)
print(consumption_schedule)
print(soc_schedule)
for target in message["targets"]:
Expand Down
4 changes: 3 additions & 1 deletion flexmeasures/api/v3_0/tests/test_sensor_schedules.py
Expand Up @@ -90,7 +90,9 @@ def test_trigger_and_get_schedule(
# check targets, if applicable
if "targets" in message:
start_soc = message["soc-at-start"] / 1000 # in MWh
soc_schedule = integrate_time_series(consumption_schedule, start_soc, 6)
soc_schedule = integrate_time_series(
consumption_schedule, start_soc, decimal_precision=6
)
print(consumption_schedule)
print(soc_schedule)
for target in message["targets"]:
Expand Down
21 changes: 9 additions & 12 deletions flexmeasures/data/models/planning/solver.py
Expand Up @@ -46,8 +46,8 @@ def device_scheduler( # noqa C901
derivative max: maximum flow (e.g. in MW or boxes/h)
derivative min: minimum flow
derivative equals: exact amount of flow (we do this by clamping derivative min and derivative max)
derivative down efficiency: ratio of downwards flows (flow into EMS : flow out of device)
derivative up efficiency: ratio of upwards flows (flow into device : flow out of EMS)
derivative down efficiency: conversion efficiency of flow out of a device (flow out : stock decrease)
derivative up efficiency: conversion efficiency of flow into a device (stock increase : flow in)
EMS constraints are on an EMS level. Handled constraints (listed by column name):
derivative max: maximum flow
derivative min: minimum flow
Expand Down Expand Up @@ -228,10 +228,12 @@ def device_derivative_up_efficiency(m, d, j):

# Add constraints as a tuple of (lower bound, value, upper bound)
def device_bounds(m, d, j):
"""Apply efficiencies to conversion from flow to stock change and vice versa."""
return (
m.device_min[d, j],
sum(
m.device_power_down[d, k] + m.device_power_up[d, k]
m.device_power_down[d, k] / m.device_derivative_down_efficiency[d, k]
+ m.device_power_up[d, k] * m.device_derivative_up_efficiency[d, k]
for k in range(0, j + 1)
),
m.device_max[d, j],
Expand Down Expand Up @@ -275,12 +277,10 @@ def ems_flow_commitment_equalities(m, j):
)

def device_derivative_equalities(m, d, j):
"""Couple device flows to EMS flows per device, applying efficiencies."""
"""Couple device flows to EMS flows per device."""
return (
0,
m.device_power_up[d, j] / m.device_derivative_up_efficiency[d, j]
+ m.device_power_down[d, j] * m.device_derivative_down_efficiency[d, j]
- m.ems_power[d, j],
m.device_power_up[d, j] + m.device_power_down[d, j] - m.ems_power[d, j],
0,
)

Expand Down Expand Up @@ -321,10 +321,7 @@ def cost_function(m):
planned_costs = value(model.costs)
planned_power_per_device = []
for d in model.d:
planned_device_power = [
model.device_power_down[d, j].value + model.device_power_up[d, j].value
for j in model.j
]
planned_device_power = [model.ems_power[d, j].value for j in model.j]
planned_power_per_device.append(
pd.Series(
index=pd.date_range(
Expand All @@ -335,7 +332,7 @@ def cost_function(m):
)

# model.pprint()
# model.display()
# print(results.solver.termination_condition)
# print(planned_costs)
# model.display()
return planned_power_per_device, planned_costs, results
8 changes: 7 additions & 1 deletion flexmeasures/data/models/planning/tests/test_solver.py
Expand Up @@ -90,7 +90,13 @@ def test_battery_solver_day_2(add_battery_assets, roundtrip_efficiency: float):
soc_max=soc_max,
roundtrip_efficiency=roundtrip_efficiency,
)
soc_schedule = integrate_time_series(schedule, soc_at_start, decimal_precision=6)
soc_schedule = integrate_time_series(
schedule,
soc_at_start,
up_efficiency=roundtrip_efficiency**0.5,
down_efficiency=roundtrip_efficiency**0.5,
decimal_precision=6,
)

with pd.option_context("display.max_rows", None, "display.max_columns", 3):
print(soc_schedule)
Expand Down
31 changes: 24 additions & 7 deletions flexmeasures/utils/calculations.py
@@ -1,6 +1,7 @@
""" Calculations """
from __future__ import annotations

from datetime import timedelta
from typing import Optional

import numpy as np
import pandas as pd
Expand Down Expand Up @@ -37,7 +38,11 @@ def drop_nan_rows(a, b):


def integrate_time_series(
s: pd.Series, s0: float, decimal_precision: Optional[int] = None
series: pd.Series,
s0: float,
up_efficiency: float | pd.Series = 1,
down_efficiency: float | pd.Series = 1,
decimal_precision: int | None = None,
) -> pd.Series:
"""Integrate time series of length n and closed="left" (representing a flow)
to a time series of length n+1 and closed="both" (representing a stock),
Expand All @@ -46,7 +51,7 @@ def integrate_time_series(
Optionally, set a decimal precision to round off the results (useful for tests failing over machine precision).

>>> series = pd.Series([1, 2, 3, 4], index=pd.date_range(datetime(2001, 1, 1, 5), datetime(2001, 1, 1, 6), freq=timedelta(minutes=15), closed="left"))
>>> integrate_time_series(s, 10)
>>> integrate_time_series(series, 10)
Flix6x marked this conversation as resolved.
Show resolved Hide resolved
2001-01-01 05:00:00 10.00
2001-01-01 05:15:00 10.25
2001-01-01 05:30:00 10.75
Expand All @@ -55,19 +60,31 @@ def integrate_time_series(
Freq: D, dtype: float64

>>> series = pd.Series([1, 2, 3, 4], index=pd.date_range(datetime(2001, 1, 1, 5), datetime(2001, 1, 1, 7), freq=timedelta(minutes=30), closed="left"))
>>> integrate_time_series(s, 10)
Flix6x marked this conversation as resolved.
Show resolved Hide resolved
>>> integrate_time_series(series, 10)
2001-01-01 05:00:00 10.0
2001-01-01 05:30:00 10.5
2001-01-01 06:00:00 11.5
2001-01-01 06:30:00 13.0
2001-01-01 07:00:00 15.0
dtype: float64
"""
resolution = pd.to_timedelta(s.index.freq)
resolution = pd.to_timedelta(series.index.freq)
stock_change = pd.Series(data=np.NaN, index=series.index)
stock_change.loc[series > 0] = series[series > 0] * (
up_efficiency[series > 0]
if isinstance(up_efficiency, pd.Series)
else up_efficiency
)
stock_change.loc[series <= 0] = series[series <= 0] / (
down_efficiency[series <= 0]
if isinstance(down_efficiency, pd.Series)
else down_efficiency
)
int_s = pd.concat(
[
pd.Series(s0, index=pd.date_range(s.index[0], periods=1)),
s.shift(1, freq=resolution).cumsum() * (resolution / timedelta(hours=1))
pd.Series(s0, index=pd.date_range(series.index[0], periods=1)),
stock_change.shift(1, freq=resolution).cumsum()
* (resolution / timedelta(hours=1))
+ s0,
]
)
Expand Down