Example #1
def test_post_price_data(setup_api_test_data, db, app, clean_redis,
                         post_message):
    """
    Try to post price data as a logged-in test user with the Prosumer role, which should succeed.
    """
    # Call with a client whose context ends, so that we can test for
    # after-effects in the database after teardown committed.
    with app.test_client() as client:
        # post price data
        auth_token = get_auth_token(client, "*****@*****.**",
                                    "testtest")
        post_price_data_response = client.post(
            url_for("flexmeasures_api_v1_1.post_price_data"),
            json=post_message,
            headers={"Authorization": auth_token},
        )
        print("Server responded with:\n%s" % post_price_data_response.json)
        assert post_price_data_response.status_code == 200
        assert post_price_data_response.json["type"] == "PostPriceDataResponse"

    verify_prices_in_db(post_message, post_message["values"], db)

    # look for Forecasting jobs in queue
    # only one market is affected, but two horizons
    assert len(app.queues["forecasting"]) == 2
    horizons = [timedelta(hours=24), timedelta(hours=48)]
    jobs = sorted(app.queues["forecasting"].jobs,
                  key=lambda x: x.kwargs["horizon"])
    market = SensorField("market", "fm0").deserialize(post_message["market"])
    for job, horizon in zip(jobs, horizons):
        assert job.kwargs["horizon"] == horizon
        assert job.kwargs["start"] == parse_date(
            post_message["start"]) + horizon
        assert job.kwargs["sensor_id"] == market.id
Example #2
class SensorDataDescriptionSchema(ma.Schema):
    """
    Schema describing sensor data (specifically, the sensor and the timing of the data).
    """

    sensor = SensorField(required=True, entity_type="sensor", fm_scheme="fm1")
    start = AwareDateTimeField(required=True, format="iso")
    duration = DurationField(required=True)
    horizon = DurationField(required=False)
    prior = AwareDateTimeField(required=False, format="iso")
    unit = fields.Str(required=True)

    @validates_schema
    def check_schema_unit_against_sensor_unit(self, data, **kwargs):
        """Allows units compatible with that of the sensor.
        For example, a sensor with W units allows data to be posted with units:
        - W, kW, MW, etc. (i.e. units with different prefixes)
        - J/s, Nm/s, etc. (i.e. units that can be converted using some multiplier)
        - Wh, kWh, etc. (i.e. units that represent a stock delta, which, knowing the duration, can be converted to a flow)
        For compatible units, the SensorDataSchema converts values to the sensor's unit.
        """
        posted_unit = data["unit"]
        required_unit = data["sensor"].unit

        if posted_unit != required_unit and not units_are_convertible(
            posted_unit, required_unit
        ):
            raise ValidationError(
                f"Required unit for this sensor is {data['sensor'].unit}, got incompatible unit: {data['unit']}"
            )
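
The unit check above treats units as compatible when they differ only by a prefix, share a dimension, or represent a stock delta that the event duration turns into a flow. Below is a minimal, self-contained sketch of that idea using pint; it is illustrative only and does not reproduce FlexMeasures' own units_are_convertible() helper.

# A minimal sketch of the compatibility idea, using pint. Illustrative only:
# it does not reproduce FlexMeasures' own units_are_convertible() helper.
from datetime import timedelta
from typing import Optional

import pint

ureg = pint.UnitRegistry()


def convertible(posted_unit: str, sensor_unit: str, duration: Optional[timedelta] = None) -> bool:
    """True if posted values can be expressed in the sensor's unit, optionally
    treating stock deltas (e.g. kWh) as flows over the given event duration."""
    posted = ureg.Quantity(1, posted_unit)
    if posted.is_compatible_with(sensor_unit):
        return True
    if duration is not None:
        # A stock delta divided by the event duration becomes a flow (kWh / 15 min -> kW)
        as_flow = posted / ureg.Quantity(duration.total_seconds(), "s")
        return as_flow.is_compatible_with(sensor_unit)
    return False


assert convertible("kW", "W")                                    # prefix only
assert convertible("J/s", "W")                                   # same dimension
assert convertible("kWh", "W", duration=timedelta(minutes=15))   # stock delta -> flow
assert not convertible("m", "W")                                 # incompatible dimensions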
Example #3
        def decorated_service(*args, **kwargs):
            # Using this decorator means you can expect this attribute; None means default
            kwargs["resolution"] = None
            form = get_form_from_request(request)
            if form is None:
                current_app.logger.warning(
                    "Unsupported request method for unpacking 'resolution' from request."
                )
                return invalid_method(request.method)

            if "resolution" in form and form["resolution"]:
                ds_resolution = parse_duration(form["resolution"])
                if ds_resolution is None:
                    return invalid_resolution_str(form["resolution"])
                # Check if the resolution can be applied to all sensors (if it is a multiple
                # of the event_resolution(s) and thus downsampling is possible)
                for asset_group in kwargs["generic_asset_name_groups"]:
                    for asset_descriptor in asset_group:
                        sensor = SensorField(
                            entity_type,
                            fm_scheme).deserialize(asset_descriptor)
                        if sensor is None:
                            return unrecognized_asset()
                        sensor_resolution = sensor.event_resolution
                        if ds_resolution % sensor_resolution != timedelta(minutes=0):
                            return unapplicable_resolution(
                                f"{isodate.duration_isoformat(sensor_resolution)} or a multiple hereof."
                            )
                kwargs["resolution"] = to_offset(
                    isodate.parse_duration(form["resolution"])
                ).freqstr  # Convert ISO period string to pandas frequency string

            return fn(*args, **kwargs)
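
Two pieces of the decorator above can be tried in isolation: the divisibility check that decides whether downsampling is possible, and the final conversion of the ISO 8601 period string into a pandas frequency string. A small standalone sketch follows; the exact frequency string depends on the pandas version.

# Standalone sketch of the resolution handling above; not the decorator itself.
from datetime import timedelta

import isodate
from pandas.tseries.frequencies import to_offset

# Divisibility check: the requested resolution must be a whole multiple of the
# sensor's event_resolution for downsampling to be possible.
assert timedelta(minutes=30) % timedelta(minutes=15) == timedelta(minutes=0)   # accepted
assert timedelta(minutes=20) % timedelta(minutes=15) != timedelta(minutes=0)   # rejected

# ISO 8601 period string -> pandas frequency string, as on the last lines above.
ds_resolution = isodate.parse_duration("PT15M")   # -> timedelta(minutes=15)
print(to_offset(ds_resolution).freqstr)           # -> "15T" ("15min" on newer pandas)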
Example #4
def test_sensor_field_straightforward(
    add_sensors,
    setup_markets,
    add_battery_assets,
    entity_address,
    entity_type,
    fm_scheme,
    exp_deserialization_name,
):
    """Testing straightforward cases"""
    sf = SensorField(entity_type, fm_scheme)
    deser = sf.deserialize(entity_address, None, None)
    assert deser.name == exp_deserialization_name
    if fm_scheme == "fm0" and entity_type in ("connection", "market", "weather_sensor"):
        # These entity types are deserialized to Sensors, which have no entity address under the fm0 scheme
        return
    assert sf.serialize(entity_type, {entity_type: deser}) == entity_address
Example #5
def verify_prices_in_db(post_message, values, db, swapped_sign: bool = False):
    """util method to verify that price data ended up in the database"""
    start = parse_datetime(post_message["start"])
    end = start + parse_duration(post_message["duration"])
    horizon = parse_duration(post_message["horizon"])
    sensor = SensorField("market", "fm0").deserialize(post_message["market"])
    resolution = sensor.event_resolution
    query = (
        db.session.query(TimedBelief.event_value, TimedBelief.belief_horizon)
        .filter(
            (TimedBelief.event_start > start - resolution)
            & (TimedBelief.event_start < end)
        )
        .filter(
            TimedBelief.belief_horizon
            == horizon - (end - (TimedBelief.event_start + resolution))
        )
        .join(Sensor)
        .filter(TimedBelief.sensor_id == Sensor.id)
        .filter(Sensor.name == sensor.name)
    )
    df = pd.DataFrame(
        query.all(),
        columns=[col["name"] for col in query.column_descriptions])
    if swapped_sign:
        df["event_value"] = -df["event_value"]
    assert df["event_value"].tolist() == values
Example #6
def verify_sensor_data_in_db(
    post_message,
    values,
    db,
    entity_type: str,
    fm_scheme: str,
    swapped_sign: bool = False,
):
    """util method to verify that sensor data ended up in the database"""
    start = parse_datetime(post_message["start"])
    end = start + parse_duration(post_message["duration"])
    sensor: Sensor = SensorField(entity_type, fm_scheme).deserialize(
        post_message[entity_type])
    resolution = sensor.event_resolution
    query = (
        db.session.query(
            TimedBelief.event_start,
            TimedBelief.event_value,
            TimedBelief.belief_horizon,
        )
        .filter(
            (TimedBelief.event_start > start - resolution)
            & (TimedBelief.event_start < end)
        )
        # .filter(TimedBelief.belief_horizon == (TimedBelief.event_start + resolution) - prior)  # only for sensors with 0-hour ex_post knowledge horizon function
        .join(Sensor)
        .filter(Sensor.name == sensor.name)
    )
    if "horizon" in post_message:
        horizon = parse_duration(post_message["horizon"])
        query = query.filter(TimedBelief.belief_horizon == horizon)
    # todo: after basing sensor data on TimedBelief, we should be able to get a BeliefsDataFrame from the query directly
    df = pd.DataFrame(
        query.all(),
        columns=[col["name"] for col in query.column_descriptions])
    bdf = tb.BeliefsDataFrame(df, sensor=sensor, source="Some source")
    if "prior" in post_message:
        prior = parse_datetime(post_message["prior"])
        bdf = bdf.fixed_viewpoint(prior)
    if swapped_sign:
        bdf["event_value"] = -bdf["event_value"]
    assert bdf["event_value"].tolist() == values
Example #7
def test_sensor_field_invalid(entity_address, entity_type, fm_scheme, error_msg):
    sf = SensorField(entity_type, fm_scheme)
    with pytest.raises(EntityAddressValidationError) as ve:
        sf.deserialize(entity_address, None, None)
    assert error_msg in str(ve)
Example #8
        def decorated_service(*args, **kwargs):
            form = get_form_from_request(request)
            if form is None:
                current_app.logger.warning(
                    "Unsupported request method for inferring resolution from request."
                )
                return invalid_method(request.method)

            if not all(key in kwargs for key in [
                    "value_groups",
                    "start",
                    "duration",
            ]):
                current_app.logger.warning("Could not infer resolution.")
                fields = ("values", "start", "duration")
                return required_info_missing(fields,
                                             "Resolution cannot be inferred.")
            if "generic_asset_name_groups" not in kwargs:
                return required_info_missing(
                    (entity_type,),
                    "Required resolution cannot be found without asset info.",
                )

            # Calculating (inferring) the resolution in the POSTed data
            inferred_resolution = (
                (kwargs["start"] + kwargs["duration"]) - kwargs["start"]
            ) / len(kwargs["value_groups"][0])

            # Finding the required resolution for sensors affected in this request
            required_resolution = None
            last_sensor = None
            for asset_group in kwargs["generic_asset_name_groups"]:
                for asset_descriptor in asset_group:
                    # Getting the sensor
                    sensor = SensorField(
                        entity_type, fm_scheme).deserialize(asset_descriptor)
                    if sensor is None:
                        return unrecognized_asset(
                            f"Failed to look up asset by {asset_descriptor}")
                    # Complain if sensors don't all require the same resolution
                    if (required_resolution is not None and
                            sensor.event_resolution != required_resolution):
                        return conflicting_resolutions(
                            f"Cannot send data for both {sensor} and {last_sensor}."
                        )
                    # Setting the resolution & remembering last looked-at sensor
                    required_resolution = sensor.event_resolution
                    last_sensor = sensor

            # If the inferred resolution is a multiple of the required_resolution, we can upsample the values
            # todo: next line fails on sensors with 0 resolution
            if inferred_resolution % required_resolution == timedelta(hours=0):
                for i in range(len(kwargs["value_groups"])):
                    kwargs["value_groups"][i] = upsample_values(
                        kwargs["value_groups"][i],
                        from_resolution=inferred_resolution,
                        to_resolution=required_resolution,
                    )
                inferred_resolution = required_resolution

            if inferred_resolution != required_resolution:
                current_app.logger.warning(
                    f"Resolution {inferred_resolution} is not accepted. We require {required_resolution}."
                )
                return unapplicable_resolution(
                    isodate.duration_isoformat(required_resolution))
            else:
                kwargs["resolution"] = inferred_resolution
                return fn(*args, **kwargs)
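
The inference and upsampling logic above can be isolated from the Flask plumbing. Below is a hedged sketch; upsample_by_repetition() is a stand-in for FlexMeasures' upsample_values(), whose real behaviour may differ.

# Sketch of the resolution inference and upsampling step, with made-up values.
from datetime import datetime, timedelta, timezone


def upsample_by_repetition(values, from_resolution, to_resolution):
    # Stand-in for upsample_values(): repeat each value to fill the finer resolution.
    factor = int(from_resolution / to_resolution)
    return [v for v in values for _ in range(factor)]


start = datetime(2024, 1, 1, tzinfo=timezone.utc)
duration = timedelta(hours=1)
value_group = [10.0, 20.0]                   # two values posted for one hour
required_resolution = timedelta(minutes=15)  # the sensor's event_resolution

# Inferred resolution: the covered period divided by the number of posted values
inferred_resolution = ((start + duration) - start) / len(value_group)  # 30 minutes

# If the inferred resolution is a whole multiple of the required one, upsample
if inferred_resolution % required_resolution == timedelta(hours=0):
    value_group = upsample_by_repetition(
        value_group, inferred_resolution, required_resolution
    )
    inferred_resolution = required_resolution

assert value_group == [10.0, 10.0, 20.0, 20.0]
assert inferred_resolution == required_resolution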