Example #1
    def post_data(self, bdf: BeliefsDataFrame):
        """
        Post sensor data to FlexMeasures.

        .. :quickref: Data; Upload sensor data

        **Example request**

        .. code-block:: json

            {
                "sensor": "ea1.2021-01.io.flexmeasures:fm1.1",
                "values": [-11.28, -11.28, -11.28, -11.28],
                "start": "2021-06-07T00:00:00+02:00",
                "duration": "PT1H",
                "unit": "m³/h"
            }

        The above request posts four values for a duration of one hour: the first
        event starts at the given start time, and subsequent events follow at
        15-minute intervals throughout the hour.

        The sensor posted to is the one with ID 1 (the trailing part of the entity address).
        The unit has to be convertible to the sensor's unit.
        The resolution of the data has to match the sensor's required resolution, but
        FlexMeasures will attempt to upsample lower resolutions.

        :reqheader Authorization: The authentication token
        :reqheader Content-Type: application/json
        :resheader Content-Type: application/json
        :status 200: PROCESSED
        :status 400: INVALID_REQUEST
        :status 401: UNAUTHORIZED
        :status 403: INVALID_SENDER
        :status 422: UNPROCESSABLE_ENTITY
        """
        response, code = save_and_enqueue(bdf)
        return response, code
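
For orientation, a client-side call matching the documented request could look like the sketch below; the endpoint path and token are assumptions about a concrete deployment, not taken from the code above.

import requests

# Hypothetical deployment URL and auth token; adjust for your FlexMeasures instance.
url = "https://company.flexmeasures.io/api/v3_0/sensors/data"
headers = {"Authorization": "<your-auth-token>"}
payload = {
    "sensor": "ea1.2021-01.io.flexmeasures:fm1.1",
    "values": [-11.28, -11.28, -11.28, -11.28],
    "start": "2021-06-07T00:00:00+02:00",
    "duration": "PT1H",
    "unit": "m³/h",
}
response = requests.post(url, json=payload, headers=headers)
print(response.status_code)  # 200 means PROCESSED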
Example #2
def post_price_data_response(
    unit,
    generic_asset_name_groups,
    horizon,
    rolling,
    value_groups,
    start,
    duration,
    resolution,
):

    current_app.logger.info("POSTING PRICE DATA")

    data_source = get_or_create_source(current_user)
    price_df_per_market = []
    forecasting_jobs = []
    for market_group, value_group in zip(generic_asset_name_groups, value_groups):
        for market in market_group:

            # Parse the entity address
            try:
                ea = parse_entity_address(market, entity_type="market", fm_scheme="fm0")
            except EntityAddressException as eae:
                return invalid_domain(str(eae))
            market_name = ea["market_name"]

            # Look for the Sensor object
            sensor = get_sensor_by_unique_name(market_name, ["day_ahead", "tou_tariff"])
            if is_response_tuple(sensor):
                # Error message telling the user what to do
                return sensor
            if unit != sensor.unit:
                return invalid_unit("%s prices" % sensor.name, [sensor.unit])

            # Create new Price objects
            beliefs = []
            for j, value in enumerate(value_group):
                dt = start + j * duration / len(value_group)
                if rolling:
                    h = horizon
                else:  # Subtract the difference between the end of this timeslot and the end of the whole series
                    h = horizon - (
                        (start + duration) - (dt + duration / len(value_group))
                    )
                p = TimedBelief(
                    event_start=dt,
                    event_value=value,
                    belief_horizon=h,
                    sensor=sensor,
                    source=data_source,
                )
                beliefs.append(p)
            price_df_per_market.append(tb.BeliefsDataFrame(beliefs))

            # Make forecasts, but not in play mode. Note that price forecasts (horizon > 0)
            # can still lead to further price forecasts, e.g. due to day-ahead markets.
            if current_app.config.get("FLEXMEASURES_MODE", "") != "play":
                # Forecast 24 and 48 hours ahead for at most the last 24 hours of posted price data
                # Extend rather than overwrite, so jobs for earlier markets in the group are kept
                forecasting_jobs.extend(
                    create_forecasting_jobs(
                        sensor.id,
                        max(start, start + duration - timedelta(hours=24)),
                        start + duration,
                        resolution=duration / len(value_group),
                        horizons=[timedelta(hours=24), timedelta(hours=48)],
                        enqueue=False,  # will enqueue later, after saving data
                    )
                )

    return save_and_enqueue(price_df_per_market, forecasting_jobs)
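
To make the horizon arithmetic above concrete, here is a minimal standalone sketch (plain datetime math, no FlexMeasures imports; the sample numbers are made up) of how the non-rolling branch shifts the stated horizon per timeslot:

from datetime import datetime, timedelta, timezone

start = datetime(2021, 6, 7, tzinfo=timezone.utc)
duration = timedelta(hours=1)
values = [40.0, 41.0, 42.0, 43.0]  # four 15-minute slots
horizon = timedelta(hours=12)  # stated relative to the end of the whole series

for j, value in enumerate(values):
    dt = start + j * duration / len(values)
    # Non-rolling: earlier slots end sooner, so their effective horizon
    # (measured from their own end) is smaller than the stated one.
    h = horizon - ((start + duration) - (dt + duration / len(values)))
    print(dt.time(), value, h)  # first slot: 11:15:00, last slot: 12:00:00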
Example #3
def post_weather_data_response(  # noqa: C901
    unit,
    generic_asset_name_groups,
    horizon,
    rolling,
    value_groups,
    start,
    duration,
    resolution,
):

    current_app.logger.info("POSTING WEATHER DATA")

    data_source = get_or_create_source(current_user)
    weather_df_per_sensor = []
    forecasting_jobs = []
    for sensor_group, value_group in zip(generic_asset_name_groups, value_groups):
        for sensor in sensor_group:

            # Parse the entity address
            try:
                ea = parse_entity_address(
                    sensor, entity_type="weather_sensor", fm_scheme="fm0"
                )
            except EntityAddressException as eae:
                return invalid_domain(str(eae))
            weather_sensor_type_name = ea["weather_sensor_type_name"]
            latitude = ea["latitude"]
            longitude = ea["longitude"]

            # Check whether the unit is valid for this sensor type (e.g. no m/s allowed for temperature data)
            accepted_units = valid_sensor_units(weather_sensor_type_name)
            if unit not in accepted_units:
                return invalid_unit(weather_sensor_type_name, accepted_units)

            sensor = get_sensor_by_generic_asset_type_and_location(
                weather_sensor_type_name, latitude, longitude
            )
            if is_response_tuple(sensor):
                # Error message telling the user about the nearest weather sensor they can post to
                return sensor

            # Create new Weather objects
            beliefs = []
            for j, value in enumerate(value_group):
                dt = start + j * duration / len(value_group)
                if rolling:
                    h = horizon
                else:  # Subtract the difference between the end of this timeslot and the end of the whole series
                    h = horizon - (
                        (start + duration) - (dt + duration / len(value_group))
                    )
                w = TimedBelief(
                    event_start=dt,
                    event_value=value,
                    belief_horizon=h,
                    sensor=sensor,
                    source=data_source,
                )
                beliefs.append(w)
            weather_df_per_sensor.append(tb.BeliefsDataFrame(beliefs))

            # Make forecasts, but only if the posted values are not forecasts themselves (and not in play mode)
            if current_app.config.get(
                "FLEXMEASURES_MODE", ""
            ) != "play" and horizon <= timedelta(
                hours=0
            ):  # Todo: replace 0 hours with whatever the moment of switching from ex-ante to ex-post is for this sensor
                forecasting_jobs.extend(
                    create_forecasting_jobs(
                        sensor.id,
                        start,
                        start + duration,
                        resolution=duration / len(value_group),
                        enqueue=False,  # will enqueue later, after saving data
                    )
                )

    return save_and_enqueue(weather_df_per_sensor, forecasting_jobs)
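
The unit check above delegates to valid_sensor_units, which is not shown; a hypothetical whitelist of that shape could look as follows (the mapping is illustrative only, not the actual FlexMeasures table):

# Illustrative stand-in for valid_sensor_units; the real mapping lives in FlexMeasures.
ACCEPTED_UNITS = {
    "temperature": ["°C"],
    "wind_speed": ["m/s"],
    "radiation": ["kW/m²"],
}

def valid_sensor_units(weather_sensor_type_name: str) -> list:
    return ACCEPTED_UNITS.get(weather_sensor_type_name, [])

print(valid_sensor_units("temperature"))  # ['°C']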
Example #4
def post_price_data_response(  # noqa: C901
    unit,
    generic_asset_name_groups,
    horizon,
    prior,
    value_groups,
    start,
    duration,
    resolution,
) -> ResponseTuple:

    # Additional validation; TODO: move this into Marshmallow
    if horizon is None and prior is None:
        extra_info = "Missing horizon or prior."
        return invalid_horizon(extra_info)

    current_app.logger.info("POSTING PRICE DATA")

    data_source = get_or_create_source(current_user)
    price_df_per_market = []
    forecasting_jobs = []
    for market_group, event_values in zip(generic_asset_name_groups, value_groups):
        for market in market_group:

            # Parse the entity address
            try:
                ea = parse_entity_address(market, entity_type="market")
            except EntityAddressException as eae:
                return invalid_domain(str(eae))
            sensor_id = ea["sensor_id"]

            # Look for the Sensor object
            sensor = Sensor.query.filter(Sensor.id == sensor_id).one_or_none()
            if sensor is None:
                return unrecognized_market(sensor_id)
            elif unit != sensor.unit:
                return invalid_unit("%s prices" % sensor.name, [sensor.unit])

            # Convert to timely-beliefs terminology
            event_starts, belief_horizons = determine_belief_timing(
                event_values, start, resolution, horizon, prior, sensor
            )

            # Create new Price objects
            beliefs = [
                TimedBelief(
                    event_start=event_start,
                    event_value=event_value,
                    belief_horizon=belief_horizon,
                    sensor=sensor,
                    source=data_source,
                )
                for event_start, event_value, belief_horizon in zip(
                    event_starts, event_values, belief_horizons
                )
            ]
            price_df_per_market.append(tb.BeliefsDataFrame(beliefs))

            # Make forecasts, but not in play mode. Note that price forecasts (horizon > 0)
            # can still lead to further price forecasts, e.g. due to day-ahead markets.
            if current_app.config.get("FLEXMEASURES_MODE", "") != "play":
                # Forecast 24 and 48 hours ahead for at most the last 24 hours of posted price data
                # Extend rather than overwrite, so jobs for earlier markets in the group are kept
                forecasting_jobs.extend(
                    create_forecasting_jobs(
                        sensor.id,
                        max(start, start + duration - timedelta(hours=24)),
                        start + duration,
                        resolution=duration / len(event_values),
                        horizons=[timedelta(hours=24), timedelta(hours=48)],
                        enqueue=False,  # will enqueue later, after saving data
                    )
                )

    return save_and_enqueue(price_df_per_market, forecasting_jobs)
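
determine_belief_timing itself is not shown. As a rough sketch of the terminology conversion it performs, assuming for illustration that knowledge time coincides with each event's end (the real function consults the sensor's knowledge settings), a fixed prior translates into per-event horizons like this:

from datetime import datetime, timedelta, timezone

def sketch_belief_timing(event_values, start, resolution, horizon, prior):
    """Illustrative only: derive event starts and per-event belief horizons."""
    event_starts = [start + j * resolution for j in range(len(event_values))]
    if horizon is not None:
        # A rolling horizon applies to every event as-is.
        belief_horizons = [horizon] * len(event_values)
    else:
        # A fixed belief time (prior): horizon = knowledge time - belief time,
        # here assuming knowledge time = event end.
        belief_horizons = [es + resolution - prior for es in event_starts]
    return event_starts, belief_horizons

start = datetime(2021, 6, 7, tzinfo=timezone.utc)
starts, horizons = sketch_belief_timing([10.0, 11.0], start, timedelta(hours=1), None, start)
print(horizons)  # [timedelta(hours=1), timedelta(hours=2)]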
Example #5
def post_power_data(
    unit,
    generic_asset_name_groups,
    value_groups,
    horizon,
    prior,
    start,
    duration,
    resolution,
    create_forecasting_jobs_too,
):

    # Additional validation; TODO: move this into Marshmallow
    if horizon is None and prior is None:
        extra_info = "Missing horizon or prior."
        return invalid_horizon(extra_info)

    current_app.logger.info("POSTING POWER DATA")

    data_source = get_or_create_source(current_user)
    user_sensors = get_sensors()
    if not user_sensors:
        current_app.logger.info("User doesn't seem to have any assets")
    user_sensor_ids = [sensor.id for sensor in user_sensors]
    power_df_per_connection = []
    forecasting_jobs = []
    for connection_group, event_values in zip(generic_asset_name_groups, value_groups):
        for connection in connection_group:

            # TODO: get asset through util function after refactoring
            # Parse the entity address
            try:
                ea = parse_entity_address(connection, entity_type="connection")
            except EntityAddressException as eae:
                return invalid_domain(str(eae))
            sensor_id = ea["sensor_id"]

            # Look for the Sensor object
            if sensor_id in user_sensor_ids:
                sensor = Sensor.query.filter(Sensor.id == sensor_id).one_or_none()
            else:
                current_app.logger.warning("Cannot identify connection %s" % connection)
                return unrecognized_connection_group()

            # Validate the sign of the values (following USEF specs with positive consumption and negative production)
            if sensor.get_attribute("is_strictly_non_positive") and any(
                v < 0 for v in event_values
            ):
                extra_info = (
                    "Connection %s is registered as a pure consumer and can only receive non-negative values."
                    % sensor.entity_address
                )
                return power_value_too_small(extra_info)
            elif sensor.get_attribute("is_strictly_non_negative") and any(
                v > 0 for v in event_values
            ):
                extra_info = (
                    "Connection %s is registered as a pure producer and can only receive non-positive values."
                    % sensor.entity_address
                )
                return power_value_too_big(extra_info)

            # Convert to timely-beliefs terminology
            event_starts, belief_horizons = determine_belief_timing(
                event_values, start, resolution, horizon, prior, sensor
            )

            # Create new Power objects
            beliefs = [
                TimedBelief(
                    event_start=event_start,
                    event_value=event_value
                    * -1,  # Reverse sign for FlexMeasures specs with positive production and negative consumption
                    belief_horizon=belief_horizon,
                    sensor=sensor,
                    source=data_source,
                )
                for event_start, event_value, belief_horizon in zip(
                    event_starts, event_values, belief_horizons
                )
            ]
            power_df_per_connection.append(tb.BeliefsDataFrame(beliefs))

            if create_forecasting_jobs_too:
                forecasting_jobs.extend(
                    create_forecasting_jobs(
                        sensor_id,
                        start,
                        start + duration,
                        resolution=duration / len(event_values),
                        enqueue=False,  # will enqueue later, after saving data
                    )
                )

    return save_and_enqueue(power_df_per_connection, forecasting_jobs)
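
The sign handling above accepts USEF-signed input (positive consumption, negative production) and stores the reversed sign, since FlexMeasures uses positive production; a minimal standalone illustration of that round trip:

def validate_and_flip(values, is_pure_consumer=False, is_pure_producer=False):
    """Illustrative only: validate USEF-signed input, return FlexMeasures-signed output."""
    if is_pure_consumer and any(v < 0 for v in values):
        raise ValueError("A pure consumer can only receive non-negative values.")
    if is_pure_producer and any(v > 0 for v in values):
        raise ValueError("A pure producer can only receive non-positive values.")
    return [v * -1 for v in values]  # reverse sign for storage

print(validate_and_flip([5.0, -3.0]))  # [-5.0, 3.0]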
Example #6
def post_weather_data_response(  # noqa: C901
    unit,
    generic_asset_name_groups,
    horizon,
    prior,
    value_groups,
    start,
    duration,
    resolution,
) -> ResponseTuple:
    # Additional validation; TODO: move this into Marshmallow
    if horizon is None and prior is None:
        extra_info = "Missing horizon or prior."
        return invalid_horizon(extra_info)

    current_app.logger.info("POSTING WEATHER DATA")

    data_source = get_or_create_source(current_user)
    weather_df_per_sensor = []
    forecasting_jobs = []
    for sensor_group, event_values in zip(generic_asset_name_groups, value_groups):
        for sensor in sensor_group:

            # Parse the entity address
            try:
                ea = parse_entity_address(sensor, entity_type="weather_sensor")
            except EntityAddressException as eae:
                return invalid_domain(str(eae))
            weather_sensor_type_name = ea["weather_sensor_type_name"]
            latitude = ea["latitude"]
            longitude = ea["longitude"]

            # Check whether the unit is valid for this sensor type (e.g. no m/s allowed for temperature data)
            accepted_units = valid_sensor_units(weather_sensor_type_name)
            if unit not in accepted_units:
                return invalid_unit(weather_sensor_type_name, accepted_units)

            sensor: Sensor = get_sensor_by_generic_asset_type_and_location(
                weather_sensor_type_name, latitude, longitude
            )

            # Convert to timely-beliefs terminology
            event_starts, belief_horizons = determine_belief_timing(
                event_values, start, resolution, horizon, prior, sensor
            )

            # Create new Weather objects
            beliefs = [
                TimedBelief(
                    event_start=event_start,
                    event_value=event_value,
                    belief_horizon=belief_horizon,
                    sensor=sensor,
                    source=data_source,
                )
                for event_start, event_value, belief_horizon in zip(
                    event_starts, event_values, belief_horizons
                )
            ]
            weather_df_per_sensor.append(tb.BeliefsDataFrame(beliefs))

            # Make forecasts, but only if the posted values are not forecasts themselves (and not in play mode)
            if current_app.config.get(
                "FLEXMEASURES_MODE", ""
            ) != "play" and horizon <= timedelta(
                hours=0
            ):  # Todo: replace 0 hours with whatever the moment of switching from ex-ante to ex-post is for this generic asset
                forecasting_jobs.extend(
                    create_forecasting_jobs(
                        sensor.id,
                        start,
                        start + duration,
                        resolution=duration / len(event_values),
                        horizons=[horizon],
                        enqueue=False,  # will enqueue later, after saving data
                    )
                )

    return save_and_enqueue(weather_df_per_sensor, forecasting_jobs)
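
The gating condition above is easy to misread in its line-wrapped layout; an equivalent helper spelling out the two checks (the name is mine, for illustration):

from datetime import timedelta

def should_create_forecasts(mode: str, horizon: timedelta) -> bool:
    # Forecast only outside play mode, and only when the posted values are
    # measurements rather than forecasts, i.e. the stated horizon is not positive.
    return mode != "play" and horizon <= timedelta(hours=0)

print(should_create_forecasts("", timedelta(hours=-1)))  # True
print(should_create_forecasts("play", timedelta(hours=-1)))  # False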
Example #7
def create_connection_and_value_groups(  # noqa: C901
    unit, generic_asset_name_groups, value_groups, horizon, rolling, start, duration
):
    """
    Code for POSTing Power values to the API.
    Only lets users post to assets they own.
    The sign of values is validated according to asset specs, but in USEF terms.
    Then, we store the reverse sign for FlexMeasures specs (with positive production
    and negative consumption).

    If power values are not forecasts, forecasting jobs are created.
    """

    current_app.logger.info("POSTING POWER DATA")

    data_source = get_or_create_source(current_user)
    user_sensors = get_sensors()
    if not user_sensors:
        current_app.logger.info("User doesn't seem to have any assets")
    user_sensor_ids = [sensor.id for sensor in user_sensors]
    power_df_per_connection = []
    forecasting_jobs = []
    for connection_group, value_group in zip(generic_asset_name_groups, value_groups):
        for connection in connection_group:

            # TODO: get asset through util function after refactoring
            # Parse the entity address
            try:
                connection = parse_entity_address(
                    connection, entity_type="connection", fm_scheme="fm0"
                )
            except EntityAddressException as eae:
                return invalid_domain(str(eae))
            sensor_id = connection["asset_id"]

            # Look for the Sensor object
            if sensor_id in user_sensor_ids:
                sensor = Sensor.query.filter(Sensor.id == sensor_id).one_or_none()
            else:
                current_app.logger.warning("Cannot identify connection %s" % connection)
                return unrecognized_connection_group()

            # Validate the sign of the values (following USEF specs with positive consumption and negative production)
            if sensor.get_attribute("is_strictly_non_positive") and any(
                v < 0 for v in value_group
            ):
                extra_info = (
                    "Connection %s is registered as a pure consumer and can only receive non-negative values."
                    % sensor.entity_address
                )
                return power_value_too_small(extra_info)
            elif sensor.get_attribute("is_strictly_non_negative") and any(
                v > 0 for v in value_group
            ):
                extra_info = (
                    "Connection %s is registered as a pure producer and can only receive non-positive values."
                    % sensor.entity_address
                )
                return power_value_too_big(extra_info)

            # Create a new BeliefsDataFrame
            beliefs = []
            for j, value in enumerate(value_group):
                dt = start + j * duration / len(value_group)
                if rolling:
                    h = horizon
                else:  # Subtract the difference between the end of this timeslot and the end of the whole series
                    h = horizon - (
                        (start + duration) - (dt + duration / len(value_group))
                    )
                p = TimedBelief(
                    event_start=dt,
                    event_value=value
                    * -1,  # Reverse sign for FlexMeasures specs with positive production and negative consumption
                    belief_horizon=h,
                    sensor=sensor,
                    source=data_source,
                )

                assert p not in db.session
                beliefs.append(p)
            power_df_per_connection.append(tb.BeliefsDataFrame(beliefs))

            # Make forecasts, but only if the posted values are not forecasts themselves
            if horizon <= timedelta(
                hours=0
            ):  # Todo: replace 0 hours with whatever the moment of switching from ex-ante to ex-post is for this sensor
                forecasting_jobs.extend(
                    create_forecasting_jobs(
                        sensor_id,
                        start,
                        start + duration,
                        resolution=duration / len(value_group),
                        enqueue=False,  # will enqueue later, after saving data
                    )
                )

    return save_and_enqueue(power_df_per_connection, forecasting_jobs)
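
All seven examples funnel into save_and_enqueue, whose implementation is not shown; it accepts a single BeliefsDataFrame or a list of them, plus optional pre-built jobs. The contract implied by the call sites is roughly the following sketch (stand-in logic under that assumption, not the actual helper):

def save_and_enqueue_sketch(beliefs_dataframes, forecasting_jobs=()):
    """Illustrative contract only, inferred from the call sites above:
    persist each BeliefsDataFrame, enqueue the pre-built forecasting jobs,
    and return an API-style (response, status_code) tuple."""
    saved = list(beliefs_dataframes)  # stand-in for the database commit
    enqueued = list(forecasting_jobs)  # stand-in for queueing to the job queue
    return {"status": "PROCESSED", "saved": len(saved), "jobs": len(enqueued)}, 200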