Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Performance improvement rescaleClusterPeriods #79

Merged
merged 7 commits into from Aug 25, 2023
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Jump to
Jump to file
Failed to load files.
Diff view
Diff view
2 changes: 1 addition & 1 deletion setup.py
Expand Up @@ -8,7 +8,7 @@

setuptools.setup(
name="tsam",
version="2.3.0",
version="2.3.1",
author="Leander Kotzur, Maximilian Hoffmann",
author_email="leander.kotzur@googlemail.com, max.hoffmann@fz-juelich.de",
description="Time series aggregation module (tsam) to create typical periods",
Expand Down
25 changes: 13 additions & 12 deletions tsam/timeseriesaggregation.py
Expand Up @@ -828,18 +828,19 @@ def _rescaleClusterPeriods(self, clusterOrder, clusterPeriods, extremeClusterIdx
series, without changing the values of the extremePeriods.
"""
weightingVec = pd.Series(self._clusterPeriodNoOccur).values
typicalPeriods = pd.DataFrame(
clusterPeriods, columns=self.normalizedPeriodlyProfiles.columns
)
typicalPeriods = pd.concat([
pd.Series(s, index=self.normalizedPeriodlyProfiles.columns)
for s in self.clusterPeriods
], axis=1).T
idx_wo_peak = np.delete(typicalPeriods.index, extremeClusterIdx)
for column in self.timeSeries.columns:
diff = 1
sum_raw = self.normalizedPeriodlyProfiles[column].sum().sum()
sum_peak = sum(
sum_peak = np.sum(
weightingVec[extremeClusterIdx]
* typicalPeriods[column].loc[extremeClusterIdx, :].sum(axis=1)
)
sum_clu_wo_peak = sum(
sum_clu_wo_peak = np.sum(
weightingVec[idx_wo_peak]
* typicalPeriods[column].loc[idx_wo_peak, :].sum(axis=1)
)
Expand Down Expand Up @@ -869,13 +870,12 @@ def _rescaleClusterPeriods(self, clusterOrder, clusterPeriods, extremeClusterIdx
)

# reset values higher than the upper scale or less than zero
typicalPeriods[column][typicalPeriods[column] > scale_ub] = scale_ub
typicalPeriods[column][typicalPeriods[column] < 0.0] = 0.0
typicalPeriods[column].clip(lower=0, upper=scale_ub, inplace=True)

typicalPeriods[column] = typicalPeriods[column].fillna(0.0)
typicalPeriods[column].fillna(0.0, inplace=True)

# calc new sum and new diff to orig data
sum_clu_wo_peak = sum(
sum_clu_wo_peak = np.sum(
weightingVec[idx_wo_peak]
* typicalPeriods[column].loc[idx_wo_peak, :].sum(axis=1)
)
Expand Down Expand Up @@ -1066,9 +1066,10 @@ def createTypicalPeriods(self):
)

# put the clustered data in pandas format and scale back
self.normalizedTypicalPeriods = pd.DataFrame(
self.clusterPeriods, columns=self.normalizedPeriodlyProfiles.columns
).stack(level="TimeStep")
self.normalizedTypicalPeriods = pd.concat([
pd.Series(s, index=self.normalizedPeriodlyProfiles.columns)
for s in self.clusterPeriods
], axis=1).unstack("TimeStep").T

if self.segmentation:
from tsam.utils.segmentation import segmentation
Expand Down