diff --git a/Readme.md b/Readme.md index b42fc8860..068eb02ef 100644 --- a/Readme.md +++ b/Readme.md @@ -3,6 +3,7 @@ ![lint-and-test](https://github.com/SeitaBV/flexmeasures/workflows/lint-and-test/badge.svg) [![](https://img.shields.io/badge/python-3.6+-blue.svg)](https://www.python.org/downloads/) [![Code style: black](https://img.shields.io/badge/code%20style-black-000000.svg)](https://github.com/psf/black) +[![Documentation Status](https://readthedocs.org/projects/flexmeasures/badge/?version=latest)](https://flexmeasures.readthedocs.io/en/latest/?badge=latest) The *FlexMeasures Platform* is a tool for scheduling flexible actions for energy assets. For this purpose, it performs monitoring, forecasting and scheduling services. diff --git a/documentation/changelog.rst b/documentation/changelog.rst index f3b76b308..52a2e033e 100644 --- a/documentation/changelog.rst +++ b/documentation/changelog.rst @@ -8,8 +8,9 @@ v0.2.4 | March XX, 2021 New features ----------- -* FlexMeasures can be installed with `pip` and its CLI commands can be run with `flexmeasures` [see `PR #54 `_] +* FlexMeasures can be installed with ``pip`` and its CLI commands can be run with ``flexmeasures`` [see `PR #54 `_] * Optionally setting recording time when posting data [see `PR #41 `_] +* Add assets and weather sensors with CLI commands [see `PR #74 `_] Bugfixes -------- diff --git a/documentation/cli/change_log.rst b/documentation/cli/change_log.rst new file mode 100644 index 000000000..1f3a5a3d8 --- /dev/null +++ b/documentation/cli/change_log.rst @@ -0,0 +1,13 @@ +.. 
_cli-changelog: + +************************** +FlexMeasures CLI Changelog +************************** + + +since v0.2.4 | March XX, 2021 +============================= + +* Refactor CLI into the main groups ``add``, ``delete``, ``jobs`` and ``db-ops`` +* Add ``flexmeasures add asset``, ``flexmeasures add user`` and ``flexmeasures add weather-sensor`` +* split the ``populate-db`` command into ``flexmeasures add structure`` and ``flexmeasures add forecasts`` \ No newline at end of file diff --git a/documentation/cli/commands.rst b/documentation/cli/commands.rst new file mode 100644 index 000000000..69dbefe07 --- /dev/null +++ b/documentation/cli/commands.rst @@ -0,0 +1,66 @@ +.. _cli: + +Command Line Interface (CLI) +============================= + +FlexMeasures comes with a command-line utility, which helps to manage data. +Below, we list all available commands. + +Each command has more extensive documentation if you call it with ``--help``. + +We keep track of changes to these commands in :ref:`cli-changelog`. +You can also get the current overview over the commands you have available by: + +.. code-block:: + + flexmeasures --help + +This also shows admin commands made available through Flask and installed extensions (such as `Flask-Security `_ and `Flask-Migrate `_), +of which some are referred to in this documentation. + + +``add`` - Add data +------------------ + +================================================= ======================================= +``flexmeasures add structure`` Initialize structural data like asset types, + market types and weather sensor types. +``flexmeasures add user`` Create a FlexMeasures user. +``flexmeasures add asset`` Create a new asset. +``flexmeasures add weather-sensor`` Add a weather sensor. +``flexmeasures add external-weather-forecasts`` Collect weather forecasts from the DarkSky API. +``flexmeasures add forecasts`` Create forecasts. 
+================================================= ======================================= + + +``delete`` - Delete data +-------------- + +================================================= ======================================= +``flexmeasures delete structure`` Delete all structural (non time-series) data like assets (types), + markets (types) and weather sensors (types) and users. +``flexmeasures delete user`` Delete a user & also their assets and power measurements. +``flexmeasures delete measurements`` Delete measurements (with horizon <= 0). +``flexmeasures delete prognoses`` Delete forecasts and schedules (forecasts > 0). +================================================= ======================================= + + +``jobs`` - Job queueing +-------------- + +================================================= ======================================= +``flexmeasures jobs run-worker`` Start a worker process for forecasting and/or scheduling jobs. +``flexmeasures jobs clear-queue`` Clear a job queue. +================================================= ======================================= + + +``db-ops`` - Operations on the whole database +-------------- + +================================================= ======================================= +``flexmeasures db-ops dump`` Create a dump of all current data (using `pg_dump`). +``flexmeasures db-ops load`` Load backed-up contents (see `db-ops save`), run `reset` first. +``flexmeasures db-ops reset`` Reset database data and re-create tables from data model. +``flexmeasures db-ops restore`` Restore the dump file, see `db-ops dump` (run `reset` first). +``flexmeasures db-ops save`` Backup db content to files. 
+================================================= ======================================= \ No newline at end of file diff --git a/documentation/configuration.rst b/documentation/configuration.rst index a0b3eb953..e737e0780 100644 --- a/documentation/configuration.rst +++ b/documentation/configuration.rst @@ -13,7 +13,7 @@ Recommended settings (e.g. mail, redis) are marked by one star (*). * in the user's home directory (e.g. ``~/.flexmeasures.cfg`` on Unix). In this case, note the dot at the beginning of the filename! -* in the apps's instance directory (e.g. ``/path/to/your/flexmeasures/code/instance/flexmeasures.cfg``\ ). The path to that instance directory is shown to you by running flexmeasures (e.g. ``flexmeasures run``\ ) with required settings missing or otherwise by running ``flexmeasures shell``. +* in the app's instance directory (e.g. ``/path/to/your/flexmeasures/code/instance/flexmeasures.cfg``\ ). The path to that instance directory is shown to you by running flexmeasures (e.g. ``flexmeasures run``\ ) with required settings missing or otherwise by running ``flexmeasures shell``. Basic functionality ------------------- @@ -85,7 +85,7 @@ Default: ``False`` RQ_DASHBOARD_POLL_INTERVAL ^^^^^^^^^^^^^^^^^^^^^^^^^^ -Interval in which viewing the queues dashboard refreshes itself, in miliseconds. +Interval in which viewing the queues dashboard refreshes itself, in milliseconds. Default: ``3000`` (3 seconds) @@ -121,9 +121,10 @@ DARK_SKY_API_KEY Token for accessing the DarkSky weather forecasting service. -.. note:: DarkSky will be soon (Aug 1, 2021) become non-public, so thay are not giving out new tokens. We'll use another service soon, `see this issue `_. - -This is unfortunate. In the meantime, if you can't find anybody lending their token, you can add weather forecasts to the FlexMeasures db yourself. +.. note:: DarkSky will soon become non-public (Aug 1, 2021), so they are not giving out new tokens. + We'll use another service soon (`see this issue `_). 
+ This is unfortunate. + In the meantime, if you can't find anybody lending their token, consider posting weather forecasts to the FlexMeasures database yourself. Default: ``None`` diff --git a/documentation/dev/data.rst b/documentation/dev/data.rst index f385ec442..39a5ca81f 100644 --- a/documentation/dev/data.rst +++ b/documentation/dev/data.rst @@ -43,6 +43,7 @@ On Windows: * Add the lib and bin directories to your Windows path: http://bobbyong.com/blog/installing-postgresql-on-windoes/ * ``conda install psycopg2`` + Make sure postgres represents datetimes in UTC timezone ^^^^^^^^^^^^^ @@ -60,6 +61,7 @@ Find the ``timezone`` setting and set it to 'UTC'. Then restart the postgres server. + Setup the "flexmeasures" Unix user ^^^^^^^^^^^^^ @@ -142,11 +144,13 @@ Write: into the config file you are using, e.g. ~/flexmeasures.cfg + Get structure (and some data) into place ^^^^^^^^^^^^^ You need data to enjoy the benefits of FlexMeasures or to develop features for it. In this section, there are some ways to get started. + Import from another database """""""""""""""""""""""""""""" @@ -156,7 +160,7 @@ On the to-be-exported database: .. code-block:: bash - flask db-dump + flexmeasures db-ops dump .. note:: Only the data gets dumped here. @@ -165,14 +169,14 @@ Then, we create the structure in our database anew, based on the data model give .. code-block:: bash - flexmeasures db-reset + flexmeasures db-ops reset Then we import the data dump we made earlier: .. code-block:: bash - flask db-restore + flexmeasures db-ops restore A potential ``alembic_version`` error should not prevent other data tables from being restored. @@ -196,14 +200,14 @@ You can create users with the ``new-user`` command. Check it out: .. code-block:: bash - flexmeasures new-user --help + flexmeasures add user --help You can create some pre-determined asset types and data sources with this command: .. code-block:: bash - flexmeasures db-populate --structure + flexmeasures add structure .. 
todo:: We should instead offer CLI commands to be able to create asset types as needed. @@ -217,16 +221,22 @@ You can create forecasts for your existing metered data with this command: .. code-block:: bash - flexmeasures db-populate --forecasts + flexmeasures add forecasts -Check out it's ``--help`` content to learn more. You can set which assets and which time window you want to forecast. At the time of writing, the forecasts horizons are fixed to 1, 6, 24 and 48 hours. Of course, making forecasts takes a while for a larger dataset. +Check out its ``--help`` content to learn more. You can set which assets and which time window you want to forecast. Of course, making forecasts takes a while for a larger dataset. +You can also simply queue a job with this command (and run a worker to process the :ref:`redis-queue`). -Just to note: There is also a command to get rid of data: +Just to note, there are also commands to get rid of data, such as: .. code-block:: bash - flexmeasures db-depopulate --structure --data --forecasts + flexmeasures delete structure + flexmeasures delete measurements + flexmeasures delete prognoses + +Check out the :ref:`cli` documentation for more details. + Visualize the data model @@ -247,7 +257,8 @@ Maintenance Maintenance is supported with the alembic tool. It reacts automatically to almost all changes in the SQLAlchemy code. With alembic, multiple databases, -e.g. dev, staging and production can be kept in sync. +such as development, staging and production databases can be kept in sync. + Make first migration ^^^^^^^^^^^^^^^^^^^^^^^ @@ -345,7 +356,10 @@ It is really useful (and therefore an industry standard) to bundle certain datab Please see the package ``flexmeasures.data.transactional`` for details on how a FlexMeasures developer should make use of this concept. If you are writing a script or a view, you will find there the necessary structural help to bundle your work in a transaction. -Redis and redis queue + +.. 
_redis-queue: + +Redis queue ----------------------- FlexMeasures supports jobs (e.g. forecasting) running asynchronously to the main FlexMeasures application using `Redis Queue `_. @@ -356,17 +370,18 @@ Forecasting jobs are usually created (and enqueued) when new data comes in via t .. code-block:: bash - flexmeasures run_worker --queue forecasting + flexmeasures jobs run-worker --queue forecasting You should be able to run multiple workers in parallel, if necessary. You can add the ``--name`` argument to keep them a bit more organized. The FlexMeasures unit tests use fakeredis to simulate this task queueing, with no configuration required. + Inspect the queue and jobs ^^^^^^^^^^^^^^^^^^^^^^^^^^^^ -The first option to inspect the state of the ``forecasting`` queue should be via the formiddable `RQ dashboard `_. If you have admin rights, you can access it at ``your-flexmeasures-url/rq/``\ , so for instance ``http://localhost:5000/rq/``. You can also start RQ dashboard yourself (but you need to know the redis server credentials): +The first option to inspect the state of the ``forecasting`` queue should be via the formidable `RQ dashboard `_. If you have admin rights, you can access it at ``your-flexmeasures-url/rq/``\ , so for instance ``http://localhost:5000/rq/``. You can also start RQ dashboard yourself (but you need to know the redis server credentials): .. code-block:: bash diff --git a/documentation/getting-started.rst b/documentation/getting-started.rst index 0acb2f422..ca95bc513 100644 --- a/documentation/getting-started.rst +++ b/documentation/getting-started.rst @@ -99,20 +99,53 @@ FlexMeasures is a web-based platform, so we need a user account: .. code-block:: - flexmeasures new-user --username --email --roles=admin + flexmeasures add user --username --email --roles=admin * This will ask you to set a password for the user. * Giving the first user the ``admin`` role is probably what you want. 
+ Add structure ^^^^^^^^^^^^^ -Populate the database with some standard energy asset types: +Populate the database with some standard energy asset types, weather sensor types and a dummy market: + +.. code-block:: + + flexmeasures add structure + + +Add your first weather sensor +^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + +Weather plays a role for almost all use cases. +FlexMeasures supports a few weather sensor types out of the box ("temperature", "wind_speed" and "radiation"), but you need to decide which ones you need and where they are located. +Let's use the ``flexmeasures`` :ref:`cli` to add one: .. code-block:: - flexmeasures db-populate --structure + flexmeasures add weather-sensor --name "my rooftop thermometer" --weather-sensor-type-name temperature --unit °C --event-resolution 15 --latitude 33 --longitude 2.4 + + +Add your first asset +^^^^^^^^^^^^^^^^^^^^ + +There are three ways to add assets: + +Head over to ``http://localhost:5000/assets`` and add a new asset there. + +Or, use the ``flexmeasures`` :ref:`cli`: + +.. code-block:: + + flexmeasures add asset --name "my basement battery pack" --asset-type-name battery --capacity-in-MW 30 --event-resolution 2 --latitude 65 --longitude 123.76 --owner-id 1 + +Here, I left out the ``--market-id`` parameter, because in this quickstart scenario I'm fine with the dummy market created with ``flexmeasures add structure`` above. +For the ownership, I got my user ID from the output of ``flexmeasures add user`` above, or I can browse to `FlexMeasures' user listing `_ and hover over my username. + +Finally, you can also use the `POST /api/v2_0/assets `_ endpoint in the FlexMeasures API to create an asset. + Run FlexMeasures ^^^^^^^^^^^^^^^^ @@ -126,19 +159,11 @@ It's finally time to start running FlexMeasures: (This might print some warnings, see the next section where we go into more detail) .. 
note:: In a production context, you shouldn't run a script - hand the ``app`` object to a WSGI process, as your platform of choice describes. -Often, that requires a WSGI script. We provide an example WSGI script in :ref:`continuous_integration`. + Often, that requires a WSGI script. We provide an example WSGI script in :ref:`continuous_integration`. You can visit ``http://localhost:5000`` now to see if the app's UI works. When you see the dashboard, the map will not work. For that, you'll need to get your :ref:`mapbox_access_token` and add it to your config file. -Add your first asset -^^^^^^^^^^^^^^^^^^^^ - -Head over to ``http://localhost:5000/assets`` and add a new asset there. - -.. note:: `issue 57 `_ should create a CLI function for this. - -.. note:: You can also use the `POST /api/v2_0/assets `_ endpoint in the FlexMeasures API to create an asset. Add data ^^^^^^^^ @@ -147,11 +172,11 @@ You can use the `POST /api/v2_0/postMeterData `_ should create a CLI function for adding a lot of data at once, from a CSV dataset. -Also, you can add forecasts for your meter data with the ``db_populate`` command, here is an example: +Also, you can add forecasts for your meter data with the ``flexmeasures add`` command, here is an example: .. code-block:: - flexmeasures db-populate --forecasts --from-date 2020-03-08 --to-date 2020-04-08 --asset-type Asset --asset my-solar-panel + flexmeasures add forecasts --from-date 2020-03-08 --to-date 2020-04-08 --asset-type Asset --asset my-solar-panel .. note:: You can also use the API to send forecast data. @@ -186,12 +211,13 @@ More information (e.g. for installing on Windows) on `the Cbc website `_). Preparing the job queue database and start workers diff --git a/documentation/index.rst b/documentation/index.rst index 4023a9ae6..8f91e90e1 100644 --- a/documentation/index.rst +++ b/documentation/index.rst @@ -72,6 +72,13 @@ The platform operator of FlexMeasures can be an Aggregator. api/v1 api/change_log +.. 
toctree:: + :caption: The CLI + :maxdepth: 1 + + cli/commands + cli/change_log + .. toctree:: :caption: Developers diff --git a/flexmeasures/api/__init__.py b/flexmeasures/api/__init__.py index ae38b31ce..ef838271c 100644 --- a/flexmeasures/api/__init__.py +++ b/flexmeasures/api/__init__.py @@ -1,5 +1,4 @@ from flask import Flask, Blueprint, request -from flask_marshmallow import Marshmallow from flask_security.utils import verify_password from flask_json import as_json from flask_login import current_user @@ -14,8 +13,6 @@ # The api blueprint. It is registered with the Flask app (see app.py) flexmeasures_api = Blueprint("flexmeasures_api", __name__) -ma: Marshmallow = Marshmallow() - @flexmeasures_api.route("/requestAuthToken", methods=["POST"]) @as_json @@ -83,8 +80,6 @@ def get_versions() -> dict: def register_at(app: Flask): """This can be used to register this blueprint together with other api-related things""" - global ma - ma.init_app(app) # handle API specific errors app.register_error_handler(FMValidationError, validation_error_handler) diff --git a/flexmeasures/api/play/implementations.py b/flexmeasures/api/play/implementations.py index 54028bd6a..17edcc957 100644 --- a/flexmeasures/api/play/implementations.py +++ b/flexmeasures/api/play/implementations.py @@ -10,8 +10,8 @@ ) from flexmeasures.data.config import db from flexmeasures.data.scripts.data_gen import ( - depopulate_data, - depopulate_forecasts, + depopulate_measurements, + depopulate_prognoses, depopulate_structure, get_affected_classes, load_tables, @@ -52,8 +52,8 @@ def restore_data_response(): # Reset in play mode only (this endpoint should not have been registered otherwise) assert app.config.get("FLEXMEASURES_MODE", "") == "play" if delete_data: - depopulate_forecasts(db) - depopulate_data(db) + depopulate_prognoses(db) + depopulate_measurements(db) if delete_structure: depopulate_structure(db) diff --git a/flexmeasures/api/v2_0/implementations/assets.py 
b/flexmeasures/api/v2_0/implementations/assets.py index b110306b6..d4fb8f79e 100644 --- a/flexmeasures/api/v2_0/implementations/assets.py +++ b/flexmeasures/api/v2_0/implementations/assets.py @@ -4,60 +4,17 @@ from flask_security import current_user from flask_json import as_json -from marshmallow import ValidationError, validate, validates, fields, validates_schema from sqlalchemy.exc import IntegrityError from webargs.flaskparser import use_args +from marshmallow import fields from flexmeasures.data.services.resources import get_assets -from flexmeasures.data.models.assets import Asset as AssetModel -from flexmeasures.data.models.user import User +from flexmeasures.data.models.assets import Asset as AssetModel, AssetSchema from flexmeasures.data.auth_setup import unauthorized_handler from flexmeasures.data.config import db -from flexmeasures.api import ma from flexmeasures.api.common.responses import required_info_missing -class AssetSchema(ma.SQLAlchemySchema): - class Meta: - model = AssetModel - - @validates("owner_id") - def validate_owner(self, owner_id): - owner = User.query.get(owner_id) - if not owner: - raise ValidationError(f"Owner with id {owner_id} doesn't exist.") - if "Prosumer" not in owner.flexmeasures_roles: - raise ValidationError("Owner must have role 'Prosumer'.") - - # TODO: also validate existence of market and asset type - - @validates_schema(skip_on_field_errors=False) - def validate_soc_constraints(self, data, **kwargs): - if "max_soc_in_mwh" in data and "min_soc_in_mwh" in data: - if data["max_soc_in_mwh"] < data["min_soc_in_mwh"]: - errors = { - "max_soc_in_mwh": "This value must be equal or higher than the minimum soc." 
- } - raise ValidationError(errors) - - id = ma.auto_field() - name = ma.auto_field(required=True) - display_name = fields.Str(validate=validate.Length(min=4)) - unit = ma.auto_field(required=True) - event_resolution = fields.TimeDelta(required=True, precision="minutes") - capacity_in_mw = fields.Float(required=True, validate=validate.Range(min=0.0001)) - min_soc_in_mwh = fields.Float(validate=validate.Range(min=0)) - max_soc_in_mwh = fields.Float(validate=validate.Range(min=0)) - soc_in_mwh = ma.auto_field() - soc_datetime = ma.auto_field() - soc_udi_event_id = ma.auto_field() - latitude = fields.Float(required=True, validate=validate.Range(min=-90, max=90)) - longitude = fields.Float(required=True, validate=validate.Range(min=-180, max=180)) - asset_type_name = ma.auto_field(required=True) - owner_id = ma.auto_field(required=True) - market_id = ma.auto_field(required=True) - - asset_schema = AssetSchema() assets_schema = AssetSchema(many=True) diff --git a/flexmeasures/api/v2_0/implementations/users.py b/flexmeasures/api/v2_0/implementations/users.py index 57178707b..ddbd7ca78 100644 --- a/flexmeasures/api/v2_0/implementations/users.py +++ b/flexmeasures/api/v2_0/implementations/users.py @@ -9,7 +9,7 @@ from flask_json import as_json from pytz import all_timezones -from flexmeasures.api import ma +from flexmeasures.data import ma from flexmeasures.data.models.user import User as UserModel from flexmeasures.data.services.users import ( get_users, diff --git a/flexmeasures/api/v2_0/tests/test_api_v2_0_assets.py b/flexmeasures/api/v2_0/tests/test_api_v2_0_assets.py index 1279c0b1c..201baddd3 100644 --- a/flexmeasures/api/v2_0/tests/test_api_v2_0_assets.py +++ b/flexmeasures/api/v2_0/tests/test_api_v2_0_assets.py @@ -149,9 +149,8 @@ def test_post_an_asset_with_existing_name(client): json=post_data, headers={"content-type": "application/json", "Authorization": auth_token}, ) - assert asset_creation.status_code == 400 - assert "already exists" in 
asset_creation.json["message"] - assert "asset_name" in asset_creation.json["detail"] + assert asset_creation.status_code == 422 + assert "already exists" in asset_creation.json["message"]["json"]["name"][0] def test_post_an_asset_with_nonexisting_field(client): diff --git a/flexmeasures/data/__init__.py b/flexmeasures/data/__init__.py index f42f4554b..4ddf8e80c 100644 --- a/flexmeasures/data/__init__.py +++ b/flexmeasures/data/__init__.py @@ -2,25 +2,33 @@ from flask import Flask from flask_migrate import Migrate +from flask_marshmallow import Marshmallow from flexmeasures.data.config import configure_db_for, db from flexmeasures.data.auth_setup import configure_auth from flexmeasures.data.transactional import after_request_exception_rollback_session +ma: Marshmallow = Marshmallow() + + def register_at(app: Flask): # First configure the central db object and Alembic's migration tool configure_db_for(app) Migrate(app, db, directory=os.path.join(app.root_path, "data", "migrations")) + global ma + ma.init_app(app) + configure_auth(app, db) if app.cli: # Register some useful custom scripts with the flask cli with app.app_context(): - import flexmeasures.data.scripts.cli_tasks.background_workers - import flexmeasures.data.scripts.cli_tasks.db_pop - import flexmeasures.data.scripts.cli_tasks.data_collection + import flexmeasures.data.scripts.cli_tasks.jobs + import flexmeasures.data.scripts.cli_tasks.data_add + import flexmeasures.data.scripts.cli_tasks.data_delete + import flexmeasures.data.scripts.cli_tasks.db_ops import flexmeasures.data.scripts.cli_tasks.testing # noqa: F401 app.teardown_request(after_request_exception_rollback_session) diff --git a/flexmeasures/data/config.py b/flexmeasures/data/config.py index cf6c7cf04..26af9d753 100644 --- a/flexmeasures/data/config.py +++ b/flexmeasures/data/config.py @@ -38,11 +38,11 @@ def configure_db_for(app: Flask): # they will be registered properly on the metadata. 
Otherwise # you will have to import them first before calling configure_db(). from flexmeasures.data.models import ( # noqa: F401 - assets, time_series, - data_sources, markets, + assets, weather, + data_sources, user, task_runs, forecasting, diff --git a/flexmeasures/data/models/assets.py b/flexmeasures/data/models/assets.py index 369a612c0..cf6e16b73 100644 --- a/flexmeasures/data/models/assets.py +++ b/flexmeasures/data/models/assets.py @@ -3,9 +3,13 @@ import isodate import timely_beliefs as tb from sqlalchemy.orm import Query +from marshmallow import ValidationError, validate, validates, fields, validates_schema from flexmeasures.data.config import db -from flexmeasures.data.models.time_series import Sensor, TimedValue +from flexmeasures.data import ma +from flexmeasures.data.models.time_series import Sensor, SensorSchema, TimedValue +from flexmeasures.data.models.markets import Market +from flexmeasures.data.models.user import User from flexmeasures.utils.entity_address_utils import build_entity_address from flexmeasures.utils.flexmeasures_inflection import humanize, pluralize @@ -175,6 +179,67 @@ def __repr__(self): ) +class AssetSchema(SensorSchema, ma.SQLAlchemySchema): + """ + Asset schema, with validations. + """ + + class Meta: + model = Asset + + @validates("name") + def validate_name(self, name: str): + asset = Asset.query.filter(Asset.name == name).one_or_none() + if asset: + raise ValidationError(f"An asset with the name {name} already exists.") + + @validates("owner_id") + def validate_owner(self, owner_id: int): + owner = User.query.get(owner_id) + if not owner: + raise ValidationError(f"Owner with id {owner_id} doesn't exist.") + if "Prosumer" not in owner.flexmeasures_roles: + raise ValidationError( + "Asset owner must have role 'Prosumer'." + f" User {owner_id} has roles {[r.name for r in owner.flexmeasures_roles]}." 
+ ) + + @validates("market_id") + def validate_market(self, market_id: int): + market = Market.query.get(market_id) + if not market: + raise ValidationError(f"Market with id {market_id} doesn't exist.") + + @validates("asset_type_name") + def validate_asset_type(self, asset_type_name: str): + asset_type = AssetType.query.get(asset_type_name) + if not asset_type: + raise ValidationError(f"Asset type {asset_type_name} doesn't exist.") + + @validates_schema(skip_on_field_errors=False) + def validate_soc_constraints(self, data, **kwargs): + if "max_soc_in_mwh" in data and "min_soc_in_mwh" in data: + if data["max_soc_in_mwh"] < data["min_soc_in_mwh"]: + errors = { + "max_soc_in_mwh": "This value must be equal or higher than the minimum soc." + } + raise ValidationError(errors) + + id = ma.auto_field() + display_name = fields.Str(validate=validate.Length(min=4)) + capacity_in_mw = fields.Float(required=True, validate=validate.Range(min=0.0001)) + min_soc_in_mwh = fields.Float(validate=validate.Range(min=0)) + max_soc_in_mwh = fields.Float(validate=validate.Range(min=0)) + soc_in_mwh = ma.auto_field() + soc_datetime = ma.auto_field() + soc_udi_event_id = ma.auto_field() + latitude = fields.Float(required=True, validate=validate.Range(min=-90, max=90)) + longitude = fields.Float(required=True, validate=validate.Range(min=-180, max=180)) + asset_type_name = ma.auto_field(required=True) + owner_id = ma.auto_field(required=True) + market_id = ma.auto_field(required=True) + + def assets_share_location(assets: List[Asset]) -> bool: """ Return True if all assets in this list are located on the same spot. 
diff --git a/flexmeasures/data/models/time_series.py b/flexmeasures/data/models/time_series.py index 08e721509..98f91b85f 100644 --- a/flexmeasures/data/models/time_series.py +++ b/flexmeasures/data/models/time_series.py @@ -4,8 +4,10 @@ from sqlalchemy.ext.declarative import declared_attr from sqlalchemy.orm import Query, Session import timely_beliefs as tb +from marshmallow import Schema, fields from flexmeasures.data.config import db +from flexmeasures.data import ma from flexmeasures.data.queries.utils import ( add_belief_timing_filter, add_user_source_filter, @@ -20,6 +22,26 @@ class Sensor(db.Model, tb.SensorDBMixin): """A sensor measures events. """ +class SensorSchema(Schema): + """ + Base sensor schema. + + Here we include all fields which are implemented by timely_beliefs.SensorDBMixin + All classes inheriting from timely beliefs sensor don't need to repeat these. + In a while, this schema can represent our unified Sensor class. + + When subclassing, also subclass from `ma.SQLAlchemySchema` and add your own DB model class, e.g.: + + class Meta: + model = Asset + """ + + name = ma.auto_field(required=True) + unit = ma.auto_field(required=True) + timezone = ma.auto_field() + event_resolution = fields.TimeDelta(required=True, precision="minutes") + + class TimedValue(object): """ A mixin of all tables that store time series data, either forecasts or measurements. 
diff --git a/flexmeasures/data/models/weather.py b/flexmeasures/data/models/weather.py index 83556054f..00dcc05dd 100644 --- a/flexmeasures/data/models/weather.py +++ b/flexmeasures/data/models/weather.py @@ -6,9 +6,12 @@ from sqlalchemy.ext.hybrid import hybrid_method, hybrid_property from sqlalchemy.sql.expression import func from sqlalchemy.schema import UniqueConstraint +from marshmallow import ValidationError, validates, validate, fields from flexmeasures.data.config import db -from flexmeasures.data.models.time_series import Sensor, TimedValue + +from flexmeasures.data import ma +from flexmeasures.data.models.time_series import Sensor, SensorSchema, TimedValue from flexmeasures.utils.geo_utils import parse_lat_lng from flexmeasures.utils.flexmeasures_inflection import humanize @@ -168,6 +171,37 @@ def to_dict(self) -> Dict[str, str]: return dict(name=self.name, sensor_type=self.weather_sensor_type_name) +class WeatherSensorSchema(SensorSchema, ma.SQLAlchemySchema): + """ + WeatherSensor schema, with validations. + """ + + class Meta: + model = WeatherSensor + + @validates("name") + def validate_name(self, name: str): + sensor = WeatherSensor.query.filter( + WeatherSensor.name == name.lower() + ).one_or_none() + if sensor: + raise ValidationError( + f"A weather sensor with the name {name} already exists." + ) + + @validates("weather_sensor_type_name") + def validate_weather_sensor_type(self, weather_sensor_type_name: str): + weather_sensor_type = WeatherSensorType.query.get(weather_sensor_type_name) + if not weather_sensor_type: + raise ValidationError( + f"Weather sensor type {weather_sensor_type_name} doesn't exist." + ) + + weather_sensor_type_name = ma.auto_field(required=True) + latitude = fields.Float(required=True, validate=validate.Range(min=-90, max=90)) + longitude = fields.Float(required=True, validate=validate.Range(min=-180, max=180)) + + class Weather(TimedValue, db.Model): """ All weather measurements are stored in one slim table. 
diff --git a/flexmeasures/data/scripts/cli_tasks/data_add.py b/flexmeasures/data/scripts/cli_tasks/data_add.py new file mode 100644 index 000000000..d5152a1c3 --- /dev/null +++ b/flexmeasures/data/scripts/cli_tasks/data_add.py @@ -0,0 +1,327 @@ +"""CLI Tasks for (de)populating the database - most useful in development""" + +from datetime import timedelta +from typing import List + +import pandas as pd +import pytz +from flask import current_app as app +from flask.cli import with_appcontext +from flask_security.utils import hash_password +import click +import getpass + +from flexmeasures.data.services.forecasting import create_forecasting_jobs +from flexmeasures.data.services.users import create_user +from flexmeasures.data.models.assets import Asset, AssetSchema +from flexmeasures.data.models.markets import Market +from flexmeasures.data.models.weather import WeatherSensor, WeatherSensorSchema + + +@click.group("add") +def fm_add_data(): + """FlexMeasures: Add data.""" + + +@fm_add_data.command("user") +@with_appcontext +@click.option("--username", required=True) +@click.option("--email", required=True) +@click.option("--roles", help="e.g. anonymous,Prosumer,CPO") +@click.option( + "--timezone", + default="UTC", + help="timezone as string, e.g. 'UTC' or 'Europe/Amsterdam'", +) +def new_user(username: str, email: str, roles: List[str], timezone: str): + """ + Create a FlexMeasures user. + + The `users create` task from Flask Security Too is too simple for us. + Use this to add email, timezone and roles. + """ + try: + pytz.timezone(timezone) + except pytz.UnknownTimeZoneError: + print("Timezone %s is unknown!" 
% timezone) + raise click.Abort + pwd1 = getpass.getpass(prompt="Please enter the password:") + pwd2 = getpass.getpass(prompt="Please repeat the password:") + if pwd1 != pwd2: + print("Passwords do not match!") + raise click.Abort + created_user = create_user( + username=username, + email=email, + password=hash_password(pwd1), + timezone=timezone, + user_roles=roles, + check_deliverability=False, + ) + app.db.session.commit() + print(f"Successfully created user {created_user}") + + +@fm_add_data.command("asset") +@with_appcontext +@click.option("--name", required=True) +@click.option("--asset-type-name", required=True) +@click.option("--unit", required=True, help="e.g. MW, kW/h", default="MW") +@click.option("--capacity-in-MW", required=True, type=float) +@click.option( + "--event-resolution", + required=True, + type=int, + help="Expected resolution of the data in minutes", +) +@click.option( + "--latitude", + required=True, + type=float, + help="Latitude of the asset's location", +) +@click.option( + "--longitude", + required=True, + type=float, + help="Longitude of the asset's location", +) +@click.option( + "--owner-id", required=True, type=int, help="Id of the user who owns this asset." +) +@click.option( + "--market-id", + type=int, + help="Id of the market used to price this asset. Defaults to a dummy TOU market.", +) +@click.option( + "--timezone", + default="UTC", + help="timezone as string, e.g. 'UTC' (default) or 'Europe/Amsterdam'.", +) +def new_asset(**args): + """ + Create a new asset. + """ + check_timezone(args["timezone"]) + # if no market given, select dummy market + if args["market_id"] is None: + dummy_market = Market.query.filter(Market.name == "dummy-tou").one_or_none() + if not dummy_market: + print( + "No market ID given and also no dummy TOU market available. Maybe add structure first." 
+ ) + raise click.Abort() + args["market_id"] = dummy_market.id + check_errors(AssetSchema().validate(args)) + args["event_resolution"] = timedelta(minutes=args["event_resolution"]) + asset = Asset(**args) + app.db.session.add(asset) + app.db.session.commit() + print(f"Successfully created asset with ID:{asset.id}.") + print(f" You can access it at its entity address {asset.entity_address}") + + +@fm_add_data.command("weather-sensor") +@with_appcontext +@click.option("--name", required=True) +@click.option("--weather-sensor-type-name", required=True) +@click.option("--unit", required=True, help="e.g. °C, m/s, kW/m²") +@click.option( + "--event-resolution", + required=True, + type=int, + help="Expected resolution of the data in minutes", +) +@click.option( + "--latitude", + required=True, + type=float, + help="Latitude of the sensor's location", +) +@click.option( + "--longitude", + required=True, + type=float, + help="Longitude of the sensor's location", +) +@click.option( + "--timezone", + default="UTC", + help="timezone as string, e.g. 
'UTC' (default) or 'Europe/Amsterdam'", +) +def add_weather_sensor(**args): + """Add a weather sensor.""" + check_timezone(args["timezone"]) + check_errors(WeatherSensorSchema().validate(args)) + args["event_resolution"] = timedelta(minutes=args["event_resolution"]) + sensor = WeatherSensor(**args) + app.db.session.add(sensor) + app.db.session.commit() + print(f"Successfully created sensor with ID:{sensor.id}.") + # TODO: uncomment when #66 has landed + # print(f" You can access it at its entity address {sensor.entity_address}") + + +@fm_add_data.command("structure") +@with_appcontext +def add_initial_structure(): + """Initialize structural data like asset types, market types and weather sensor types.""" + from flexmeasures.data.scripts.data_gen import populate_structure + + populate_structure(app.db) + + +@fm_add_data.command("forecasts") +@with_appcontext +@click.option( + "--asset-type", + type=click.Choice(["Asset", "Market", "WeatherSensor"]), + help="The generic asset type for which to generate forecasts." + " Follow up with Asset, Market or WeatherSensor.", +) +@click.option( + "--asset-id", + help="Populate (time series) data for a single asset only. Follow up with the asset's ID. " + "We still need --asset-type, as well, so we know where to look this ID up.", +) +@click.option( + "--from-date", + default="2015-02-08", + help="Forecast from date (inclusive). Follow up with a date in the form yyyy-mm-dd.", +) +@click.option( + "--to-date", + default="2015-12-31", + help="Forecast to date (inclusive). Follow up with a date in the form yyyy-mm-dd.", +) +@click.option( + "--horizon", + "horizons", + multiple=True, + type=click.Choice(["1", "6", "24", "48"]), + default=["1", "6", "24", "48"], + help="Forecasting horizon in hours. This argument can be given multiple times.", +) +@click.option( + "--as-job", + is_flag=True, + help="Whether to queue a forecasting job instead of computing directly." + " Useful to run locally and create forecasts on a remote server. 
In that case, just point the redis db in your" + " config settings to that of the remote server. To process the job, run a worker to process the forecasting queue.", +) +def create_forecasts( + asset_type: str = None, + asset_id: int = None, + from_date: str = "2015-02-08", + to_date: str = "2015-12-31", + horizons: List[str] = ["1"], + as_job: bool = False, +): + """ + Create forecasts. + + For example: + + --from-date 2015-02-02 --to-date 2015-02-04 --horizon 6 + + This will create forecast values from 0am on February 2nd to 0am on February 4th, + based on a 6 hour horizon. + + """ + # make horizons + horizons = [timedelta(hours=int(h)) for h in horizons] + + # apply timezone: + timezone = app.config.get("FLEXMEASURES_TIMEZONE") + from_date = pd.Timestamp(from_date).tz_localize(timezone) + to_date = pd.Timestamp(to_date).tz_localize(timezone) + + if as_job: + if asset_type == "Asset": + value_type = "Power" + if asset_type == "Market": + value_type = "Price" + if asset_type == "WeatherSensor": + value_type = "Weather" + + for horizon in horizons: + # Note that this time period refers to the period of events we are forecasting, while in create_forecasting_jobs + # the time period refers to the period of belief_times, therefore we are subtracting the horizon. + create_forecasting_jobs( + asset_id=asset_id, + timed_value_type=value_type, + horizons=[horizon], + start_of_roll=from_date - horizon, + end_of_roll=to_date - horizon, + ) + else: + from flexmeasures.data.scripts.data_gen import populate_time_series_forecasts + + populate_time_series_forecasts( + app.db, horizons, from_date, to_date, asset_type, asset_id + ) + + +@fm_add_data.command("external-weather-forecasts") +@click.option( + "--region", + type=str, + default="", + help="Name of the region (will create sub-folder, should later tag the forecast in the DB, probably).", +) +@click.option( + "--location", + type=str, + required=True, + help='Measurement location(s). 
"latitude,longitude" or "top-left-latitude,top-left-longitude:' + 'bottom-right-latitude,bottom-right-longitude." The first format defines one location to measure.' + " The second format defines a region of interest with several (>=4) locations" + ' (see also the "method" and "num_cells" parameters for this feature).', +) +@click.option( + "--num_cells", + type=int, + default=1, + help="Number of cells on the grid. Only used if a region of interest has been mapped in the location parameter.", +) +@click.option( + "--method", + default="hex", + type=click.Choice(["hex", "square"]), + help="Grid creation method. Only used if a region of interest has been mapped in the location parameter.", +) +@click.option( + "--store-in-db/--store-as-json-files", + default=False, + help="Store forecasts in the database, or simply save as json files.", +) +def collect_weather_data(region, location, num_cells, method, store_in_db): + """ + Collect weather forecasts from the DarkSky API + + This function can get weather data for one location or for several location within + a geometrical grid (See the --location parameter). + """ + from flexmeasures.data.scripts.grid_weather import get_weather_forecasts + + get_weather_forecasts(app, region, location, num_cells, method, store_in_db) + + +app.cli.add_command(fm_add_data) + + +def check_timezone(timezone): + try: + pytz.timezone(timezone) + except pytz.UnknownTimeZoneError: + print("Timezone %s is unknown!" % timezone) + raise click.Abort + + +def check_errors(errors: list): + if errors: + print( + f"Please correct the following errors:\n{errors}.\n Use the --help flag to learn more." 
+ ) + raise click.Abort diff --git a/flexmeasures/data/scripts/cli_tasks/data_collection.py b/flexmeasures/data/scripts/cli_tasks/data_collection.py deleted file mode 100644 index 7ab6fc573..000000000 --- a/flexmeasures/data/scripts/cli_tasks/data_collection.py +++ /dev/null @@ -1,44 +0,0 @@ -"""CLI tasks tasks to collect third-party data.""" - -from flask import current_app as app -import click - - -@app.cli.command() -@click.option( - "--region", - type=str, - default="", - help="Name of the region (will create sub-folder, should later tag the forecast in the DB, probably).", -) -@click.option( - "--location", - type=str, - required=True, - help='Measurement location(s). "latitude,longitude" or "top-left-latitude,top-left-longitude:' - 'bottom-right-latitude,bottom-right-longitude." The first format defines one location to measure.' - " The second format defines a region of interest with several (>=4) locations" - ' (see also the "method" and "num_cells" parameters for this feature).', -) -@click.option( - "--num_cells", - type=int, - default=1, - help="Number of cells on the grid. Only used if a region of interest has been mapped in the location parameter.", -) -@click.option( - "--method", - default="hex", - type=click.Choice(["hex", "square"]), - help="Grid creation method. Only used if a region of interest has been mapped in the location parameter.", -) -@click.option( - "--store-in-db/--store-as-json-files", - default=False, - help="Store forecasts in the database, or simply save as json files.", -) -def collect_weather_data(region, location, num_cells, method, store_in_db): - """Collect weather data for a grid. 
Leave bottom right empty for only one location (top left).""" - from flexmeasures.data.scripts.grid_weather import get_weather_forecasts - - get_weather_forecasts(app, region, location, num_cells, method, store_in_db) diff --git a/flexmeasures/data/scripts/cli_tasks/data_delete.py b/flexmeasures/data/scripts/cli_tasks/data_delete.py new file mode 100644 index 000000000..b7b630acf --- /dev/null +++ b/flexmeasures/data/scripts/cli_tasks/data_delete.py @@ -0,0 +1,144 @@ +from typing import Optional + +import click +from flask import current_app as app +from flask.cli import with_appcontext + +from flexmeasures.data.models.assets import Power +from flexmeasures.data.models.markets import Price +from flexmeasures.data.models.weather import Weather +from flexmeasures.data.scripts.data_gen import get_affected_classes +from flexmeasures.data.services.users import find_user_by_email, delete_user + + +@click.group("delete") +def fm_delete_data(): + """FlexMeasures: Delete data.""" + + +@fm_delete_data.command("user") +@with_appcontext +@click.option("--email") +def delete_user_and_data(email: str): + """ + Delete a user & also their data. + """ + the_user = find_user_by_email(email) + if the_user is None: + print(f"Could not find user with email address '{email}' ...") + return + delete_user(the_user) + app.db.session.commit() + + +def confirm_deletion( + structure: bool = False, + data: bool = False, + asset_type: Optional[str] = None, + is_by_id: bool = False, +): + affected_classes = get_affected_classes(structure, data) + if data and asset_type: + if asset_type == "Asset": + affected_classes.remove(Price) + affected_classes.remove(Weather) + elif asset_type == "Market": + affected_classes.remove(Power) + affected_classes.remove(Weather) + elif asset_type == "WeatherSensor": + affected_classes.remove(Power) + affected_classes.remove(Price) + prompt = "This deletes all %s entries from %s.\nDo you want to continue?" 
% ( + " and ".join( + ", ".join( + [affected_class.__tablename__ for affected_class in affected_classes] + ).rsplit(", ", 1) + ), + app.db.engine, + ) + if is_by_id: + prompt = prompt.replace(" all ", " ") + if not click.confirm(prompt): + raise click.Abort() + + +@fm_delete_data.command("structure") +@with_appcontext +@click.option( + "--force/--no-force", default=False, help="Skip warning about consequences." +) +def delete_structure(force): + """ + Delete all structural (non time-series) data like assets (types), + markets (types) and weather sensors (types) and users. + """ + if not force: + confirm_deletion(structure=True) + from flexmeasures.data.scripts.data_gen import depopulate_structure + + depopulate_structure(app.db) + + +@fm_delete_data.command("measurements") +@with_appcontext +@click.option( + "--asset-type", + help="Depopulate (time series) data for a specific generic asset type only." + "Follow up with Asset, Market or WeatherSensor.", +) +@click.option( + "--asset-id", + type=int, + help="Delete (time series) data for a single asset only. Follow up with the asset's ID. " + "We still need --asset-type, as well, so we know where to look this ID up.", +) +@click.option( + "--force/--no-force", default=False, help="Skip warning about consequences." +) +def delete_measurements( + force: bool, + asset_type: Optional[str] = None, + asset_id: Optional[int] = None, +): + """ Delete measurements (with horizon <= 0).""" + if not force: + confirm_deletion( + data=True, asset_type=asset_type, is_by_id=asset_id is not None + ) + from flexmeasures.data.scripts.data_gen import depopulate_measurements + + depopulate_measurements(app.db, asset_type, asset_id) + + +@fm_delete_data.command("prognoses") +@with_appcontext +@click.option( + "--force/--no-force", default=False, help="Skip warning about consequences." +) +@click.option( + "--asset-type", + help="Depopulate (time series) data for a specific generic asset type only. 
" + "Follow up with Asset, Market or WeatherSensor.", +) +@click.option( + "--asset-id", + type=int, + help="Depopulate (time series) data for a single asset only. Follow up with the asset's ID. " + "Use in combination with --asset-type, so we know where to look this name up.", +) +def delete_prognoses( + force: bool, + asset_type: Optional[str] = None, + asset_id: Optional[int] = None, +): + """Delete forecasts and schedules (forecasts > 0).""" + if not force: + confirm_deletion( + data=True, asset_type=asset_type, is_by_id=asset_id is not None + ) + from flexmeasures.data.scripts.data_gen import depopulate_prognoses + + depopulate_prognoses(app.db, asset_type, asset_id) + + +app.cli.add_command(fm_delete_data) diff --git a/flexmeasures/data/scripts/cli_tasks/db_ops.py b/flexmeasures/data/scripts/cli_tasks/db_ops.py new file mode 100644 index 000000000..461ed4a04 --- /dev/null +++ b/flexmeasures/data/scripts/cli_tasks/db_ops.py @@ -0,0 +1,140 @@ +"""CLI Tasks for saving, resetting, etc of the database""" + +from datetime import datetime +import subprocess + +from flask import current_app as app +from flask.cli import with_appcontext +import flask_migrate as migrate +import click + + +BACKUP_PATH = app.config.get("FLEXMEASURES_DB_BACKUP_PATH") + + +@click.group("db-ops") +def fm_db_ops(): + """FlexMeasures: Reset, Dump/Restore or Save/Load the DB data.""" + + +@fm_db_ops.command() +@with_appcontext +def reset(): + """Reset database data and re-create tables from data model.""" + if not app.debug: + prompt = ( + "This deletes all data and re-creates the tables on %s.\nDo you want to continue?" 
+ % app.db.engine + ) + if not click.confirm(prompt): + click.echo("I did nothing.") + return + from flexmeasures.data.scripts.data_gen import reset_db + + current_version = migrate.current() + reset_db(app.db) + migrate.stamp(current_version) + + +@fm_db_ops.command() +@with_appcontext +@click.option("--name", help="Unique name for saving the backup.") +@click.option("--dir", default=BACKUP_PATH, help="Directory for saving backups.") +@click.option( + "--structure/--no-structure", + default=True, + help="Save structural data like asset (types), market (types)," + " weather (sensors), users, roles.", +) +@click.option( + "--data/--no-data", + default=False, + help="Save (time series) data to a backup. Only do this for small data sets!", +) +def save(name: str, dir: str = BACKUP_PATH, structure: bool = True, data: bool = False): + """Backup db content to files.""" + if name: + from flexmeasures.data.scripts.data_gen import save_tables + + save_tables(app.db, name, structure=structure, data=data, backup_path=dir) + else: + click.echo( + "You must specify a unique name for the backup: --name " + ) + + +@fm_db_ops.command() +@with_appcontext +@click.option("--name", help="Name of the backup.") +@click.option("--dir", default=BACKUP_PATH, help="Directory for loading backups.") +@click.option( + "--structure/--no-structure", + default=True, + help="Load structural data like asset (types), market (types)," + " weather (sensors), users, roles.", +) +@click.option("--data/--no-data", default=False, help="Load (time series) data.") +def load(name: str, dir: str = BACKUP_PATH, structure: bool = True, data: bool = False): + """Load backed-up contents (see `db-ops save`), run `reset` first.""" + if name: + from flexmeasures.data.scripts.data_gen import load_tables + + load_tables(app.db, name, structure=structure, data=data, backup_path=dir) + else: + click.echo("You must specify the name of the backup: --name ") + + +@fm_db_ops.command() +@with_appcontext +def dump(): + 
"""Create a dump of all current data (using `pg_dump`).""" + db_uri = app.config.get("SQLALCHEMY_DATABASE_URI") + db_host_and_db_name = db_uri.split("@")[-1] + click.echo(f"Backing up {db_host_and_db_name} database") + db_name = db_host_and_db_name.split("/")[-1] + time_of_saving = datetime.now().strftime("%F-%H%M") + dump_filename = f"pgbackup_{db_name}_{time_of_saving}.dump" + command_for_dumping = f"pg_dump --no-privileges --no-owner --data-only --format=c --file={dump_filename} {db_uri}" + try: + proc = subprocess.Popen(command_for_dumping, shell=True) # , env={ + # 'PGPASSWORD': DB_PASSWORD + # }) + proc.wait() + click.echo(f"db dump successful: saved to {dump_filename}") + + except Exception as e: + click.echo(f"Exception happened during dump: {e}") + click.echo("db dump unsuccessful") + + +@fm_db_ops.command() +@with_appcontext +@click.argument("file", type=click.Path(exists=True)) +def restore(file: str): + """Restore the dump file, see `db-ops dump` (run `reset` first). + + From the command line: + + % flexmeasures db-ops dump + % flexmeasures db-ops reset + % flexmeasures db-ops restore FILE + + """ + + db_uri = app.config.get("SQLALCHEMY_DATABASE_URI") + db_host_and_db_name = db_uri.split("@")[-1] + click.echo(f"Restoring {db_host_and_db_name} database from file {file}") + command_for_restoring = f"pg_restore -d {db_uri} {file}" + try: + proc = subprocess.Popen(command_for_restoring, shell=True) # , env={ + # 'PGPASSWORD': DB_PASSWORD + # }) + proc.wait() + click.echo("db restore successful") + + except Exception as e: + click.echo(f"Exception happened during restore: {e}") + click.echo("db restore unsuccessful") + + +app.cli.add_command(fm_db_ops) diff --git a/flexmeasures/data/scripts/cli_tasks/db_pop.py b/flexmeasures/data/scripts/cli_tasks/db_pop.py deleted file mode 100644 index 8e118d04f..000000000 --- a/flexmeasures/data/scripts/cli_tasks/db_pop.py +++ /dev/null @@ -1,383 +0,0 @@ -"""CLI Tasks for (de)populating the database - most useful in 
development""" - -from datetime import datetime, timedelta -import subprocess -from typing import List - -import pandas as pd -import pytz -from flask import current_app as app -import flask_migrate as migrate -from flask_security.utils import hash_password -import click -import getpass - -from flexmeasures.data.services.forecasting import create_forecasting_jobs -from flexmeasures.data.services.users import ( - create_user, - find_user_by_email, - delete_user, -) -from flexmeasures.data.scripts.data_gen import get_affected_classes - - -BACKUP_PATH = app.config.get("FLEXMEASURES_DB_BACKUP_PATH") - - -@app.cli.command() -@click.option("--username", required=True) -@click.option("--email", required=True) -@click.option("--roles", help="e.g. anonymous,Prosumer,CPO") -@click.option( - "--timezone", - default="Europe/Amsterdam", - help="timezone as string, e.g. 'UTC' or 'Europe/Amsterdam'", -) -def new_user( - username: str, email: str, roles: List[str], timezone: str = "Europe/Amsterdam" -): - """ - Create a FlexMeasures user. - - The `users create` task from Flask Security Too is too simple for us. - Use this to add email, timezone and roles. - """ - try: - pytz.timezone(timezone) - except pytz.UnknownTimeZoneError: - print("Timezone %s is unknown!" % timezone) - return - pwd1 = getpass.getpass(prompt="Please enter the password:") - pwd2 = getpass.getpass(prompt="Please repeat the password:") - if pwd1 != pwd2: - print("Passwords do not match!") - return - create_user( - username=username, - email=email, - password=hash_password(pwd1), - timezone=timezone, - user_roles=roles, - check_deliverability=False, - ) - app.db.session.commit() - - -@app.cli.command() -@click.option("--email") -def delete_user_and_data(email: str): - """ - Delete a user, which also deletes their data. 
- """ - the_user = find_user_by_email(email) - if the_user is None: - print(f"Could not find user with email address '{email}' ...") - return - delete_user(the_user) - app.db.session.commit() - - -# @app.before_first_request -@app.cli.command() -@click.option( - "--structure/--no-structure", - default=False, - help="Populate structural data (right now: asset types).", -) -@click.option( - "--forecasts/--no-forecasts", - default=False, - help="Populate (time series) forecasts. Will do nothing without structural data present. Data links into structure.", -) -@click.option( - "--asset-type", - help="Populate (time series) data for a specific generic asset type only." - " Follow up with Asset, Market or WeatherSensor.", -) -@click.option( - "--asset", - help="Populate (time series) data for a single asset only. Follow up with the asset's name. " - "Use in combination with --asset-type, so we know where to look this name up.", -) -@click.option( - "--from_date", - default="2015-02-08", - help="Forecast from date (inclusive). Follow up with a date in the form yyyy-mm-dd.", -) -@click.option( - "--to_date", - default="2015-12-31", - help="Forecast to date (inclusive). Follow up with a date in the form yyyy-mm-dd.", -) -def db_populate( - structure: bool, - forecasts: bool, - asset_type: str = None, - from_date: str = "2015-02-08", - to_date: str = "2015-12-31", - asset: str = None, -): - """Initialize the database with static values. - TODO: split into a function for structural data and one for forecasts. - """ - if structure: - from flexmeasures.data.scripts.data_gen import populate_structure - - populate_structure(app.db) - if forecasts: - from flexmeasures.data.scripts.data_gen import populate_time_series_forecasts - - populate_time_series_forecasts(app.db, asset_type, asset, from_date, to_date) - if not structure and not forecasts: - click.echo( - "I did nothing as neither --structure nor --forecasts was given. Decide what you want!" 
- ) - - -@app.cli.command() -@click.option( - "--structure/--no-structure", - default=False, - help="Depopulate structural data like asset (types), market (types)," - " weather (sensors), users, roles.", -) -@click.option("--data/--no-data", default=False, help="Depopulate (time series) data.") -@click.option( - "--forecasts/--no-forecasts", - default=False, - help="Depopulate (time series) forecasts.", -) -@click.option( - "--force/--no-force", default=False, help="Skip warning about consequences." -) -@click.option( - "--asset-type", - help="Depopulate (time series) data for a specific generic asset type only." - "Follow up with Asset, Market or WeatherSensor.", -) -@click.option( - "--asset", - help="Depopulate (time series) data for a single asset only. Follow up with the asset's name. " - "Use in combination with --asset-type, so we know where to look this name up.", -) -def db_depopulate( - structure: bool, - data: bool, - forecasts: bool, - force: bool, - asset_type: str = None, - asset: str = None, -): - """Remove all values.""" - if not data and not structure and not forecasts: - click.echo( - "Neither --data nor --forecasts nor --structure given ... doing nothing." - ) - return - if not force and (data or structure or forecasts): - affected_tables = get_affected_classes(structure, data or forecasts) - prompt = "This deletes all %s entries from %s.\nDo you want to continue?" 
% ( - " and ".join( - ", ".join( - [affected_table.__tablename__ for affected_table in affected_tables] - ).rsplit(", ", 1) - ), - app.db.engine, - ) - if not click.confirm(prompt): - return - if forecasts: - from flexmeasures.data.scripts.data_gen import depopulate_forecasts - - depopulate_forecasts(app.db, asset_type, asset) - if data: - from flexmeasures.data.scripts.data_gen import depopulate_data - - depopulate_data(app.db, asset_type, asset) - if structure: - from flexmeasures.data.scripts.data_gen import depopulate_structure - - depopulate_structure(app.db) - - -@app.cli.command() -@click.option("--load", help="Reset to static data from file.") -@click.option("--dir", default=BACKUP_PATH, help="Directory for loading backups.") -@click.option( - "--structure/--no-structure", - default=False, - help="Load structural data like asset (types), market (types)," - " weather (sensors), users, roles.", -) -@click.option("--data/--no-data", default=False, help="Load (time series) data.") -def db_reset( - load: str = None, dir: str = BACKUP_PATH, structure: bool = True, data: bool = False -): - """Reset database, with options to load fresh data.""" - if not app.debug: - prompt = ( - "This deletes all data and resets the structure on %s.\nDo you want to continue?" - % app.db.engine - ) - if not click.confirm(prompt): - click.echo("I did nothing.") - return - from flexmeasures.data.scripts.data_gen import reset_db - - current_version = migrate.current() - reset_db(app.db) - migrate.stamp(current_version) - - if load: - if not data and not structure: - click.echo("Neither --data nor --structure given ... 
loading nothing.") - return - from flexmeasures.data.scripts.data_gen import load_tables - - load_tables(app.db, load, structure, data, dir) - - -@app.cli.command() -@click.option("--name", help="Unique name for saving the backup.") -@click.option("--dir", default=BACKUP_PATH, help="Directory for saving backups.") -@click.option( - "--structure/--no-structure", - default=True, - help="Save structural data like asset (types), market (types)," - " weather (sensors), users, roles.", -) -@click.option( - "--data/--no-data", - default=False, - help="Save (time series) data to a backup. Only do this for small data sets!", -) -def db_save( - name: str, dir: str = BACKUP_PATH, structure: bool = True, data: bool = False -): - """Save structure of the database to a backup file.""" - if name: - from flexmeasures.data.scripts.data_gen import save_tables - - save_tables(app.db, name, structure=structure, data=data, backup_path=dir) - else: - click.echo( - "You must specify a unique name for the backup: --name " - ) - - -@app.cli.command() -@click.option("--name", help="Name of the backup.") -@click.option("--dir", default=BACKUP_PATH, help="Directory for loading backups.") -@click.option( - "--structure/--no-structure", - default=True, - help="Load structural data like asset (types), market (types)," - " weather (sensors), users, roles.", -) -@click.option("--data/--no-data", default=False, help="Load (time series) data.") -def db_load( - name: str, dir: str = BACKUP_PATH, structure: bool = True, data: bool = False -): - """Load structure and/or data for the database from a backup file.""" - if name: - from flexmeasures.data.scripts.data_gen import load_tables - - load_tables(app.db, name, structure=structure, data=data, backup_path=dir) - else: - click.echo("You must specify the name of the backup: --name ") - - -@app.cli.command() -@click.option("--asset-id", help="Asset id.") -@click.option( - "--from-date", - help="Forecast from date (inclusive). 
Follow up with a date in the form yyyy-mm-dd.", -) -@click.option( - "--to-date", - help="Forecast to date (exclusive!). Follow up with a date in the form yyyy-mm-dd.", -) -@click.option("--horizon-hours", default=1, help="Forecasting horizon in hours.") -def create_power_forecasts( - asset_id: int, - from_date: str, - to_date: str, - timezone: str = "Asia/Seoul", - horizon_hours: int = 1, -): - """Creates a forecasting job. - - Useful to run locally and create forecasts on a remote server. In that case, just point the redis db in your - config settings to that of the remote server. To process the job, run a worker to process the forecasting queue. - - For example: - - from_data = "2015-02-02" - to_date = "2015-02-04" - horizon_hours = 6 - - This creates 1 job that forecasts values from 0am on May 2nd to 0am on May 4th, - based on a 6 hour horizon. - Note that this time period refers to the period of events we are forecasting, while in create_forecasting_jobs - the time period refers to the period of belief_times, therefore we are subtracting the horizon. 
- """ - create_forecasting_jobs( - asset_id=asset_id, - timed_value_type="Power", - horizons=[timedelta(hours=horizon_hours)], - start_of_roll=pd.Timestamp(from_date).tz_localize(timezone) - - timedelta(hours=horizon_hours), - end_of_roll=pd.Timestamp(to_date).tz_localize(timezone) - - timedelta(hours=horizon_hours), - ) - - -@app.cli.command() -def db_dump(): - """Create a database dump of the database used by the app.""" - db_uri = app.config.get("SQLALCHEMY_DATABASE_URI") - db_host_and_db_name = db_uri.split("@")[-1] - click.echo(f"Backing up {db_host_and_db_name} database") - db_name = db_host_and_db_name.split("/")[-1] - time_of_saving = datetime.now().strftime("%F-%H%M") - dump_filename = f"pgbackup_{db_name}_{time_of_saving}.dump" - command_for_dumping = f"pg_dump --no-privileges --no-owner --data-only --format=c --file={dump_filename} {db_uri}" - try: - proc = subprocess.Popen(command_for_dumping, shell=True) # , env={ - # 'PGPASSWORD': DB_PASSWORD - # }) - proc.wait() - click.echo(f"db dump successful: saved to {dump_filename}") - - except Exception as e: - click.echo(f"Exception happened during dump: {e}") - click.echo("db dump unsuccessful") - - -@app.cli.command() -@click.argument("file", type=click.Path(exists=True)) -def db_restore(file: str): - """Restore the database used by the app, from a given database dump file, after you've reset the database. 
- - From the command line: - - % db-dump - % db-reset - % db-restore FILE - - """ - - db_uri = app.config.get("SQLALCHEMY_DATABASE_URI") - db_host_and_db_name = db_uri.split("@")[-1] - click.echo(f"Restoring {db_host_and_db_name} database from file {file}") - command_for_restoring = f"pg_restore -d {db_uri} {file}" - try: - proc = subprocess.Popen(command_for_restoring, shell=True) # , env={ - # 'PGPASSWORD': DB_PASSWORD - # }) - proc.wait() - click.echo("db restore successful") - - except Exception as e: - click.echo(f"Exception happened during restore: {e}") - click.echo("db restore unsuccessful") diff --git a/flexmeasures/data/scripts/cli_tasks/background_workers.py b/flexmeasures/data/scripts/cli_tasks/jobs.py similarity index 84% rename from flexmeasures/data/scripts/cli_tasks/background_workers.py rename to flexmeasures/data/scripts/cli_tasks/jobs.py index a71718910..8bef867c6 100644 --- a/flexmeasures/data/scripts/cli_tasks/background_workers.py +++ b/flexmeasures/data/scripts/cli_tasks/jobs.py @@ -2,13 +2,20 @@ import click from flask import current_app as app +from flask.cli import with_appcontext from rq import Queue, Worker from sqlalchemy.orm import configure_mappers from flexmeasures.data.services.forecasting import handle_forecasting_exception -@app.cli.command("run-worker") +@click.group("jobs") +def fm_jobs(): + """FlexMeasures: Job queueing.""" + + +@fm_jobs.command("run-worker") +@with_appcontext @click.option( "--name", default=None, @@ -23,8 +30,9 @@ ) def run_worker(name: str, queue: str): """ - Use this CLI task to let a worker process forecasting and/or scheduling jobs. - It uses the app context to find out which redis queues to use. + Start a worker process for forecasting and/or scheduling jobs. + + We use the app context to find out which redis queues to use. 
""" q_list = parse_queue_list(queue) @@ -51,7 +59,8 @@ def run_worker(name: str, queue: str): worker.work() -@app.cli.command("clear-queue") +@fm_jobs.command("clear-queue") +@with_appcontext @click.option( "--queue", default=None, @@ -60,8 +69,9 @@ def run_worker(name: str, queue: str): ) def clear_queue(queue: str): """ - Use this CLI task to clear a queue. - It uses the app context to find out which redis queues to use. + Clear a job queue. + + We use the app context to find out which redis queues to use. """ q_list = parse_queue_list(queue) @@ -90,3 +100,6 @@ def parse_queue_list(queue_names_str: str) -> List[Queue]: else: raise ValueError(f"Unknown queue '{q_name}'.") return q_list + + +app.cli.add_command(fm_jobs) diff --git a/flexmeasures/data/scripts/cli_tasks/testing.py b/flexmeasures/data/scripts/cli_tasks/testing.py index aebfe2d21..f5f51628e 100644 --- a/flexmeasures/data/scripts/cli_tasks/testing.py +++ b/flexmeasures/data/scripts/cli_tasks/testing.py @@ -22,8 +22,14 @@ handle_forecasting_exception, ) - -@app.cli.command() +""" +These functions are meant for FlexMeasures developers to manually test some internal +functionality. +They are not registered as app command per default, as we don't need to show them to users. +""" + +# un-comment to use as CLI function +# @app.cli.command() def test_making_forecasts(): """ Manual test to enqueue and process a forecasting job via redis queue @@ -70,7 +76,8 @@ def test_making_forecasts(): ) -@app.cli.command() +# un-comment to use as CLI function +# @app.cli.command() @click.option("--asset-type", help="Asset type name.") @click.option("--asset", help="Asset name.") @click.option( diff --git a/flexmeasures/data/scripts/data_gen.py b/flexmeasures/data/scripts/data_gen.py index 8f0c0eafc..a71a7e0b3 100644 --- a/flexmeasures/data/scripts/data_gen.py +++ b/flexmeasures/data/scripts/data_gen.py @@ -1,6 +1,7 @@ """ Populate the database with data we know or read in. 
""" +from typing import List, Optional from pathlib import Path from shutil import rmtree from datetime import datetime, timedelta @@ -13,6 +14,7 @@ from sqlalchemy.ext.serializer import loads, dumps from timetomodel.forecasting import make_rolling_forecasts from timetomodel.exceptions import MissingData, NaNData +import pytz from humanize import naturaldelta import inflect @@ -110,6 +112,63 @@ def add_asset_types(db: SQLAlchemy): ) +def add_weather_sensor_types(db: SQLAlchemy): + db.session.add(WeatherSensorType(name="temperature")) + db.session.add(WeatherSensorType(name="wind_speed")) + db.session.add(WeatherSensorType(name="radiation")) + + +def add_market_types(db: SQLAlchemy): + db.session.add( + MarketType( + name="day_ahead", + display_name="day-ahead market", + daily_seasonality=True, + weekly_seasonality=True, + yearly_seasonality=True, + ) + ) + db.session.add( + MarketType( + name="tou_tariff", + display_name="time-of use tariff", + daily_seasonality=True, + weekly_seasonality=False, + yearly_seasonality=True, + ) + ) + + +def add_dummy_tou_market(db: SQLAlchemy): + """ + Add a dummy time-of-use market with a 1-year resolution. + Also add a few price points, each covering a whole year. + + Note that for this market, the leap years will not have a price on + December 31st. To fix that, we should use 366 days as resolution, + but test what that involves on that day, or we need timely-beliefs to switch + to defining sensor event resolutions as nominal durations. 
+ """ + market = Market( + name="dummy-tou", + event_resolution=timedelta(days=365), + market_type_name="tou_tariff", + unit="EUR/MWh", + ) + db.session.add(market) + source = DataSource.query.filter(DataSource.name == "Seita").one_or_none() + for year in range(2015, 2025): + db.session.add( + Price( + value=50, + datetime=datetime(year, 1, 1, tzinfo=pytz.utc), + horizon=timedelta(0), + data_source_id=source.id, + market=market, + ) + ) + + # ------------ Main functions -------------------------------- # These can registered at the app object as cli functions @@ -117,36 +176,36 @@ def add_asset_types(db: SQLAlchemy): @as_transaction def populate_structure(db: SQLAlchemy): """ - Add all meta data for assets, markets, users + Add initial structural data for assets, markets, data sources + + TODO: add user roles (they can get created on-the-fly, but we should be + more pro-active) """ click.echo("Populating the database %s with structural data ..." % db.engine) - add_asset_types(db) add_data_sources(db) - click.echo("DB now has %d AssetTypes" % db.session.query(AssetType).count()) + add_asset_types(db) + add_weather_sensor_types(db) + add_market_types(db) + add_dummy_tou_market(db) + click.echo("DB now has %d AssetType(s)" % db.session.query(AssetType).count()) + click.echo( + "DB now has %d WeatherSensorType(s)" + % db.session.query(WeatherSensorType).count() + ) + click.echo("DB now has %d MarketType(s)" % db.session.query(MarketType).count()) + click.echo("DB now has %d Market(s)" % db.session.query(Market).count()) @as_transaction # noqa: C901 def populate_time_series_forecasts( # noqa: C901 db: SQLAlchemy, - generic_asset_type: str = None, - generic_asset_name: str = None, - from_date: str = "2015-02-08", - to_date: str = "2015-12-31", + horizons: List[timedelta], + start: datetime, + end: datetime, + generic_asset_type: Optional[str] = None, + generic_asset_id: Optional[int] = None, ): - start = ensure_local_timezone( - datetime.strptime(from_date, "%Y-%m-%d"), 
tz_name=LOCAL_TIME_ZONE - ) - end = ensure_local_timezone( - datetime.strptime(to_date, "%Y-%m-%d") + timedelta(days=1), - tz_name=LOCAL_TIME_ZONE, - ) training_and_testing_period = timedelta(days=30) - horizons = ( - timedelta(hours=1), - timedelta(hours=6), - timedelta(hours=24), - timedelta(hours=48), - ) click.echo( "Populating the database %s with time series forecasts of %s ahead ..." @@ -161,7 +220,7 @@ def populate_time_series_forecasts( # noqa: C901 # List all generic assets for which to forecast. # Look into asset type if no asset name is given. If an asset name is given, generic_assets = [] - if generic_asset_name is None: + if generic_asset_id is None: if generic_asset_type is None or generic_asset_type == "WeatherSensor": sensors = WeatherSensor.query.all() generic_assets.extend(sensors) @@ -179,18 +238,16 @@ def populate_time_series_forecasts( # noqa: C901 return if generic_asset_type == "WeatherSensor": sensors = WeatherSensor.query.filter( - WeatherSensor.name == generic_asset_name + WeatherSensor.id == generic_asset_id ).one_or_none() if sensors is not None: generic_assets.append(sensors) if generic_asset_type == "Asset": - assets = Asset.query.filter(Asset.name == generic_asset_name).one_or_none() + assets = Asset.query.filter(Asset.id == generic_asset_id).one_or_none() if assets is not None: generic_assets.append(assets) if generic_asset_type == "Market": - markets = Market.query.filter( - Market.name == generic_asset_name - ).one_or_none() + markets = Market.query.filter(Market.id == generic_asset_id).one_or_none() if markets is not None: generic_assets.append(markets) if not generic_assets: @@ -334,15 +391,19 @@ def depopulate_structure(db: SQLAlchemy): @as_transaction -def depopulate_data( - db: SQLAlchemy, generic_asset_type: str = None, generic_asset_name: str = None +def depopulate_measurements( + db: SQLAlchemy, + generic_asset_type: Optional[str] = None, + generic_asset_id: Optional[id] = None, ): click.echo("Depopulating (time series) 
data from the database %s ..." % db.engine) num_prices_deleted = 0 num_power_measurements_deleted = 0 num_weather_measurements_deleted = 0 - if generic_asset_name is None: + # TODO: simplify this when sensors moved to one unified table + + if generic_asset_id is None: if generic_asset_type is None or generic_asset_type == "Market": num_prices_deleted = ( db.session.query(Price) @@ -370,7 +431,7 @@ def depopulate_data( if generic_asset_type == "Market": market = ( db.session.query(Market) - .filter(Market.name == generic_asset_name) + .filter(Market.id == generic_asset_id) .one_or_none() ) if market is not None: @@ -386,7 +447,7 @@ def depopulate_data( elif generic_asset_type == "Asset": asset = ( db.session.query(Asset) - .filter(Asset.name == generic_asset_name) + .filter(Asset.id == generic_asset_id) .one_or_none() ) if asset is not None: @@ -402,7 +463,7 @@ def depopulate_data( elif generic_asset_type == "WeatherSensor": sensor = ( db.session.query(WeatherSensor) - .filter(WeatherSensor.name == generic_asset_name) + .filter(WeatherSensor.id == generic_asset_id) .one_or_none() ) if sensor is not None: @@ -421,21 +482,25 @@ def depopulate_data( @as_transaction -def depopulate_forecasts( - db: SQLAlchemy, generic_asset_type: str = None, generic_asset_name: str = None +def depopulate_prognoses( + db: SQLAlchemy, + generic_asset_type: Optional[str] = None, + generic_asset_id: Optional[id] = None, ): click.echo( - "Depopulating (time series) forecasts data from the database %s ..." % db.engine + "Depopulating (time series) forecasts and schedules data from the database %s ..." 
+ % db.engine ) num_prices_deleted = 0 num_power_measurements_deleted = 0 num_weather_measurements_deleted = 0 - # Clear all forecasting jobs - num_jobs_deleted = app.queues["forecasting"].empty() + # Clear all jobs + num_forecasting_jobs_deleted = app.queues["forecasting"].empty() + num_scheduling_jobs_deleted = app.queues["scheduling"].empty() # Clear all forecasts (data with positive horizon) - if generic_asset_name is None: + if generic_asset_id is None: if generic_asset_type is None or generic_asset_type == "Market": num_prices_deleted = ( db.session.query(Price) @@ -456,14 +521,14 @@ def depopulate_forecasts( ) else: click.echo( - "Depopulating (time series) forecasts for %s from the database %s ..." - % (generic_asset_name, db.engine) + "Depopulating (time series) forecasts and schedules for %s from the database %s ..." + % (generic_asset_id, db.engine) ) if generic_asset_type == "Market": market = ( db.session.query(Market) - .filter(Market.name == generic_asset_name) + .filter(Market.id == generic_asset_id) .one_or_none() ) if market is not None: @@ -479,7 +544,7 @@ def depopulate_forecasts( if generic_asset_type == "Asset": asset = ( db.session.query(Asset) - .filter(Asset.name == generic_asset_name) + .filter(Asset.id == generic_asset_id) .one_or_none() ) if asset is not None: @@ -495,7 +560,7 @@ def depopulate_forecasts( if generic_asset_type == "WeatherSensor": sensor = ( db.session.query(WeatherSensor) - .filter(WeatherSensor.name == generic_asset_name) + .filter(WeatherSensor.id == generic_asset_id) .one_or_none() ) if sensor is not None: @@ -507,7 +572,8 @@ def depopulate_forecasts( ) else: num_weather_measurements_deleted = 0 - click.echo("Deleted %d Forecast Jobs" % num_jobs_deleted) + click.echo("Deleted %d Forecast Jobs" % num_forecasting_jobs_deleted) + click.echo("Deleted %d Schedule Jobs" % num_scheduling_jobs_deleted) click.echo("Deleted %d Price Forecasts" % num_prices_deleted) click.echo("Deleted %d Power Forecasts" % 
num_power_measurements_deleted) click.echo("Deleted %d Weather Forecasts" % num_weather_measurements_deleted) @@ -609,7 +675,7 @@ def load_tables( ) -def get_affected_classes(structure: bool = True, data: bool = False): +def get_affected_classes(structure: bool = True, data: bool = False) -> List: affected_classes = [] if structure: affected_classes += [ diff --git a/flexmeasures/data/services/users.py b/flexmeasures/data/services/users.py index f41b344d0..5a379567f 100644 --- a/flexmeasures/data/services/users.py +++ b/flexmeasures/data/services/users.py @@ -160,7 +160,11 @@ def remove_cookie_and_token_access(user: User): def delete_user(user: User): """ - Delete the user (and also his assets and power measurements!). Deleting oneself is not allowed. + Delete the user (and also his assets and power measurements!). + + The deletion cascades to the user's assets (sensors), and from there to the beliefs which reference these assets (sensors). + + Deleting oneself is not allowed. Remember to commit the session after calling this function! 
""" diff --git a/flexmeasures/ui/views/analytics.py b/flexmeasures/ui/views/analytics.py index 08d048586..bc35dfb93 100644 --- a/flexmeasures/ui/views/analytics.py +++ b/flexmeasures/ui/views/analytics.py @@ -609,7 +609,10 @@ def make_weather_figure( """Make a bokeh figure for weather data""" # Todo: plot average temperature/total_radiation/wind_speed for asset groups, and update title accordingly if weather_sensor is None: - return create_graph(pd.DataFrame()) + return create_graph( + pd.DataFrame(columns=["event_value"]), + title="Weather plot (no relevant weather sensor found)", + ) unit = weather_sensor.unit weather_axis_label = "%s (in %s)" % ( humanize(weather_sensor.sensor_type.display_name), diff --git a/flexmeasures/utils/entity_address_utils.py b/flexmeasures/utils/entity_address_utils.py index 057dfc987..6231e98fa 100644 --- a/flexmeasures/utils/entity_address_utils.py +++ b/flexmeasures/utils/entity_address_utils.py @@ -1,10 +1,11 @@ +import logging from typing import Optional, Union from urllib.parse import urlparse import re from tldextract import extract as tld_extract from tldextract.tldextract import ExtractResult as TldExtractResult -from flask import request, current_app +from flask import request, current_app, has_request_context from flexmeasures.utils.time_utils import get_first_day_of_next_month @@ -59,9 +60,11 @@ def build_entity_address( Returns the address as string. """ if host is None: - # TODO: Assume localhost if request is not given either (for tests/simulations), - # or should we raise? - host = urlparse(request.url).netloc + if has_request_context(): + host = urlparse(request.url).netloc + else: + # Assume localhost (for CLI/tests/simulations) + host = "localhost" def build_field(field: str, required: bool = True): if required and field not in entity_info: @@ -224,7 +227,7 @@ def build_ea_scheme_and_naming_authority( If not given nor configured, host_auth_start_month is the start of the next month for localhost. 
""" - domain_parts: TldExtractResult = tld_extract(host) + domain_parts: TldExtractResult = get_domain_parts(host) if host_auth_start_month is None: config_var_domain_key = ".".join( @@ -267,7 +270,7 @@ def reverse_domain_name(domain: Union[str, TldExtractResult]) -> str: You can pass in a string domain or an extraction result from tldextract """ if isinstance(domain, str): - domain_parts = tld_extract(domain) + domain_parts: TldExtractResult = get_domain_parts(domain) else: domain_parts = domain @@ -287,6 +290,16 @@ def reverse_domain_name(domain: Union[str, TldExtractResult]) -> str: return f"{suffix}{domain}{reversed_subdomain}" +def get_domain_parts(domain: str) -> TldExtractResult: + """wrapper for calling tldextract as it logs things about file locks we don't care about.""" + logger = logging.getLogger() + level = logger.getEffectiveLevel() + logger.setLevel(logging.ERROR) + domain_parts: TldExtractResult = tld_extract(domain) + logging.getLogger().setLevel(level) + return domain_parts + + def _typed_regex_results(match, value_types) -> dict: return { k: v_type(v) if v is not None else v