def test_invalid_or_no_unit(client, method, message):
    """Requesting meter data with an invalid or missing unit should fail.

    Sends the (parametrized) message via GET or POST and expects a 400
    response whose status matches the invalid_unit response for power in MW.

    Fix: the original fell through to `get_meter_data_response = []` for any
    other method, which later raised a confusing AttributeError on
    `.status_code`; we now fail fast with a clear error instead.
    """
    # Build the auth header once instead of duplicating it per branch.
    auth_headers = {
        "Authorization": get_auth_token(client, "*****@*****.**", "testtest")
    }
    if method == "GET":
        get_meter_data_response = client.get(
            url_for("flexmeasures_api_v1.get_meter_data"),
            query_string=message,
            headers=auth_headers,
        )
    elif method == "POST":
        get_meter_data_response = client.post(
            url_for("flexmeasures_api_v1.get_meter_data"),
            json=message,
            headers=auth_headers,
        )
    else:
        raise ValueError("Test parametrized with unsupported HTTP method: %s" % method)
    assert get_meter_data_response.status_code == 400
    assert get_meter_data_response.json["type"] == "GetMeterDataResponse"
    assert (
        get_meter_data_response.json["status"]
        == invalid_unit("power", ["MW"])[0]["status"]
    )
def decorated_service(*args, **kwargs):
    """Validate the 'unit' field of the request before calling the service.

    Responds with an error when the request method is unsupported, when the
    unit is missing, or when the unit is not one of the accepted units;
    otherwise the unit is forwarded to the wrapped service function.
    """
    form = get_form_from_request(request)
    if form is None:
        current_app.logger.warning(
            "Unsupported request method for unpacking 'unit' from request."
        )
        return invalid_method(request.method)
    if "unit" not in form:
        current_app.logger.warning("Request is missing unit.")
        return invalid_unit(quantity, units)
    unit = form["unit"]
    if unit not in units:
        current_app.logger.warning(
            "Unit %s is not accepted as one of %s." % (unit, units)
        )
        return invalid_unit(quantity, units)
    kwargs["unit"] = unit
    return fn(*args, **kwargs)
def wrapper(*args, **kwargs):
    """Unpack 'unit' from the request form and hand it to the wrapped function.

    Returns an error response when the request method is unsupported for
    form unpacking or when the request does not carry a unit at all.
    """
    form = get_form_from_request(request)
    if form is None:
        current_app.logger.warning(
            "Unsupported request method for unpacking 'unit' from request."
        )
        return invalid_method(request.method)
    if "unit" not in form:
        current_app.logger.warning("Request missing 'unit'.")
        return invalid_unit(quantity=None, units=None)
    kwargs["unit"] = form["unit"]
    return fn(*args, **kwargs)
def test_post_weather_data_invalid_unit(client, post_message):
    """Posting wind speed data with a wrong unit should fail.

    The logged-in test user (Supplier role) posts weather data whose unit
    does not match the accepted unit for wind speed (m/s), and we expect a
    400 response carrying the invalid_unit message.
    """
    headers = {
        "Authorization": get_auth_token(client, "*****@*****.**", "testtest")
    }
    response = client.post(
        url_for("flexmeasures_api_v1_1.post_weather_data"),
        json=post_message,
        headers=headers,
    )
    print("Server responded with:\n%s" % response.json)
    assert response.status_code == 400
    assert response.json["type"] == "PostWeatherDataResponse"
    # Comparing against "wind speed" (with a space) also checks that any underscore
    # in the physical or economic quantity is replaced with a space.
    expected_message = invalid_unit("wind speed", ["m/s"])[0]["message"]
    assert response.json["message"] == expected_message
def test_post_price_data_invalid_unit(client, post_message):
    """Posting price data with the wrong unit should fail with a 400 response."""
    headers = {
        "Authorization": get_auth_token(client, "*****@*****.**", "testtest")
    }
    response = client.post(
        url_for("flexmeasures_api_v1_1.post_price_data"),
        json=post_message,
        headers=headers,
    )
    print("Server responded with:\n%s" % response.json)
    assert response.status_code == 400
    assert response.json["type"] == "PostPriceDataResponse"
    # Resolve the market from the posted entity address to build the expected message.
    ea = parse_entity_address(post_message["market"], "market")
    market = Market.query.filter_by(name=ea["market_name"]).one_or_none()
    expected_message = invalid_unit("%s prices" % market.display_name, ["EUR/MWh"])[0][
        "message"
    ]
    assert response.json["message"] == expected_message
def post_price_data_response(
    unit,
    generic_asset_name_groups,
    horizon,
    rolling,
    value_groups,
    start,
    duration,
    resolution,
):
    """Store posted price data (fm0 entity-address scheme) and queue price forecasts.

    :param unit: unit of the posted values; must equal the market sensor's unit
    :param generic_asset_name_groups: groups of market entity addresses
    :param horizon: belief horizon for the posted values
    :param rolling: if truthy, the same horizon applies to every value; otherwise
                    the horizon is interpreted w.r.t. the end of the time series
    :param value_groups: groups of price values (one group per market group)
    :param start: start of the posted time series
    :param duration: duration of the posted time series
    :param resolution: not used in this function body
    :returns: an API response tuple (error response on bad address/unit,
              otherwise the result of save_and_enqueue)
    """
    current_app.logger.info("POSTING PRICE DATA")
    data_source = get_or_create_source(current_user)
    price_df_per_market = []
    forecasting_jobs = []
    for market_group, value_group in zip(generic_asset_name_groups, value_groups):
        for market in market_group:

            # Parse the entity address
            try:
                ea = parse_entity_address(market, entity_type="market", fm_scheme="fm0")
            except EntityAddressException as eae:
                return invalid_domain(str(eae))
            market_name = ea["market_name"]

            # Look for the Sensor object
            sensor = get_sensor_by_unique_name(market_name, ["day_ahead", "tou_tariff"])
            if is_response_tuple(sensor):
                # Error message telling the user what to do
                return sensor
            if unit != sensor.unit:
                return invalid_unit("%s prices" % sensor.name, [sensor.unit])

            # Create new Price objects
            beliefs = []
            for j, value in enumerate(value_group):
                # Each value covers an equal slice of the overall duration.
                dt = start + j * duration / len(value_group)
                if rolling:
                    h = horizon
                else:  # Deduct the difference in end times of the individual timeslot and the timeseries duration
                    h = horizon - (
                        (start + duration) - (dt + duration / len(value_group))
                    )
                p = TimedBelief(
                    event_start=dt,
                    event_value=value,
                    belief_horizon=h,
                    sensor=sensor,
                    source=data_source,
                )
                beliefs.append(p)
            price_df_per_market.append(tb.BeliefsDataFrame(beliefs))

            # Make forecasts, but not in play mode. Price forecasts (horizon>0) can still lead to other price forecasts,
            # by the way, due to things like day-ahead markets.
            if current_app.config.get("FLEXMEASURES_MODE", "") != "play":
                # Forecast 24 and 48 hours ahead for at most the last 24 hours of posted price data
                # NOTE(review): this reassigns (rather than extends) forecasting_jobs
                # on every market iteration — confirm whether jobs for earlier
                # markets are meant to be kept.
                forecasting_jobs = create_forecasting_jobs(
                    sensor.id,
                    max(start, start + duration - timedelta(hours=24)),
                    start + duration,
                    resolution=duration / len(value_group),
                    horizons=[timedelta(hours=24), timedelta(hours=48)],
                    enqueue=False,  # will enqueue later, after saving data
                )

    return save_and_enqueue(price_df_per_market, forecasting_jobs)
def post_weather_data_response(  # noqa: C901
    unit,
    generic_asset_name_groups,
    horizon,
    rolling,
    value_groups,
    start,
    duration,
    resolution,
):
    """Store posted weather data (fm0 entity-address scheme) and possibly queue forecasts.

    :param unit: unit of the posted values; must be in the accepted units for
                 the addressed weather sensor type
    :param generic_asset_name_groups: groups of weather sensor entity addresses
    :param horizon: belief horizon for the posted values
    :param rolling: if truthy, the same horizon applies to every value; otherwise
                    the horizon is interpreted w.r.t. the end of the time series
    :param value_groups: groups of measurement values (one group per sensor group)
    :param start: start of the posted time series
    :param duration: duration of the posted time series
    :param resolution: not used in this function body
    :returns: an API response tuple (error response on bad address/unit,
              otherwise the result of save_and_enqueue)
    """
    current_app.logger.info("POSTING WEATHER DATA")
    data_source = get_or_create_source(current_user)
    weather_df_per_sensor = []
    forecasting_jobs = []
    for sensor_group, value_group in zip(generic_asset_name_groups, value_groups):
        for sensor in sensor_group:

            # Parse the entity address
            try:
                ea = parse_entity_address(
                    sensor, entity_type="weather_sensor", fm_scheme="fm0"
                )
            except EntityAddressException as eae:
                return invalid_domain(str(eae))
            weather_sensor_type_name = ea["weather_sensor_type_name"]
            latitude = ea["latitude"]
            longitude = ea["longitude"]

            # Check whether the unit is valid for this sensor type (e.g. no m/s allowed for temperature data)
            accepted_units = valid_sensor_units(weather_sensor_type_name)
            if unit not in accepted_units:
                return invalid_unit(weather_sensor_type_name, accepted_units)

            # Rebinds the loop variable: from entity address string to Sensor object.
            sensor = get_sensor_by_generic_asset_type_and_location(
                weather_sensor_type_name, latitude, longitude
            )
            if is_response_tuple(sensor):
                # Error message telling the user about the nearest weather sensor they can post to
                return sensor

            # Create new Weather objects
            beliefs = []
            for j, value in enumerate(value_group):
                # Each value covers an equal slice of the overall duration.
                dt = start + j * duration / len(value_group)
                if rolling:
                    h = horizon
                else:  # Deduct the difference in end times of the individual timeslot and the timeseries duration
                    h = horizon - (
                        (start + duration) - (dt + duration / len(value_group))
                    )
                w = TimedBelief(
                    event_start=dt,
                    event_value=value,
                    belief_horizon=h,
                    sensor=sensor,
                    source=data_source,
                )
                beliefs.append(w)
            weather_df_per_sensor.append(tb.BeliefsDataFrame(beliefs))

            # make forecasts, but only if the sent-in values are not forecasts themselves (and also not in play)
            if current_app.config.get(
                "FLEXMEASURES_MODE", ""
            ) != "play" and horizon <= timedelta(
                hours=0
            ):  # Todo: replace 0 hours with whatever the moment of switching from ex-ante to ex-post is for this sensor
                forecasting_jobs.extend(
                    create_forecasting_jobs(
                        sensor.id,
                        start,
                        start + duration,
                        resolution=duration / len(value_group),
                        enqueue=False,  # will enqueue later, after saving data
                    )
                )

    return save_and_enqueue(weather_df_per_sensor, forecasting_jobs)
def post_price_data_response(
    unit,
    generic_asset_name_groups,
    horizon,
    rolling,
    value_groups,
    start,
    duration,
    resolution,
):
    """Store posted price data as Price rows and enqueue price forecasting jobs.

    Legacy variant working with the Market/Price models and explicit session
    handling (save, flush, enqueue, commit; rollback + optional overwrite in
    play mode on IntegrityError).

    :param unit: unit of the posted values; must equal the market's unit
    :param generic_asset_name_groups: groups of market entity addresses
    :param horizon: belief horizon for the posted values
    :param rolling: if truthy, the same horizon applies to every value; otherwise
                    the horizon is interpreted w.r.t. the end of the time series
    :param value_groups: groups of price values (one group per market group)
    :param start: start of the posted time series
    :param duration: duration of the posted time series
    :param resolution: not used in this function body
    :returns: an API response tuple
    """
    current_app.logger.info("POSTING PRICE DATA")
    data_source = get_or_create_user_data_source(current_user)
    prices = []
    forecasting_jobs = []
    for market_group, value_group in zip(generic_asset_name_groups, value_groups):
        for market in market_group:

            # Parse the entity address
            try:
                ea = parse_entity_address(market, entity_type="market")
            except EntityAddressException as eae:
                return invalid_domain(str(eae))
            market_name = ea["market_name"]

            # Look for the Market object (rebinds the loop variable).
            market = Market.query.filter(Market.name == market_name).one_or_none()
            if market is None:
                return unrecognized_market(market_name)
            elif unit != market.unit:
                return invalid_unit("%s prices" % market.display_name, [market.unit])

            # Create new Price objects
            for j, value in enumerate(value_group):
                # Each value covers an equal slice of the overall duration.
                dt = start + j * duration / len(value_group)
                if rolling:
                    h = horizon
                else:  # Deduct the difference in end times of the individual timeslot and the timeseries duration
                    h = horizon - (
                        (start + duration) - (dt + duration / len(value_group))
                    )
                p = Price(
                    datetime=dt,
                    value=value,
                    horizon=h,
                    market_id=market.id,
                    data_source_id=data_source.id,
                )
                prices.append(p)

            # Make forecasts, but not in play mode. Price forecasts (horizon>0) can still lead to other price forecasts,
            # by the way, due to things like day-ahead markets.
            if current_app.config.get("FLEXMEASURES_MODE", "") != "play":
                # Forecast 24 and 48 hours ahead for at most the last 24 hours of posted price data
                # NOTE(review): this reassigns (rather than extends) forecasting_jobs
                # on every market iteration — confirm whether jobs for earlier
                # markets are meant to be kept.
                forecasting_jobs = create_forecasting_jobs(
                    "Price",
                    market.id,
                    max(start, start + duration - timedelta(hours=24)),
                    start + duration,
                    resolution=duration / len(value_group),
                    horizons=[timedelta(hours=24), timedelta(hours=48)],
                    enqueue=False,  # will enqueue later, only if we successfully saved prices
                )

    # Put these into the database
    current_app.logger.info("SAVING TO DB...")
    try:
        save_to_session(prices)
        db.session.flush()
        # Enqueue only after a successful flush, so no jobs run on unsaved data.
        [
            current_app.queues["forecasting"].enqueue_job(job)
            for job in forecasting_jobs
        ]
        db.session.commit()
        return request_processed()
    except IntegrityError as e:
        current_app.logger.warning(e)
        db.session.rollback()

        # Allow price data to be replaced only in play mode
        if current_app.config.get("FLEXMEASURES_MODE", "") == "play":
            save_to_session(prices, overwrite=True)
            [
                current_app.queues["forecasting"].enqueue_job(job)
                for job in forecasting_jobs
            ]
            db.session.commit()
            return request_processed()
        else:
            return already_received_and_successfully_processed()
def post_weather_data_response(  # noqa: C901
    unit,
    generic_asset_name_groups,
    horizon,
    rolling,
    value_groups,
    start,
    duration,
    resolution,
):
    """Store posted weather data as Weather rows and possibly enqueue forecasting jobs.

    Legacy variant working with the Weather model and explicit session
    handling (save, flush, enqueue, commit; rollback + optional overwrite in
    play mode on IntegrityError).

    :param unit: unit of the posted values; must be in the accepted units for
                 the addressed weather sensor type
    :param generic_asset_name_groups: groups of sensor entity addresses
    :param horizon: belief horizon for the posted values
    :param rolling: if truthy, the same horizon applies to every value; otherwise
                    the horizon is interpreted w.r.t. the end of the time series
    :param value_groups: groups of measurement values (one group per sensor group)
    :param start: start of the posted time series
    :param duration: duration of the posted time series
    :param resolution: not used in this function body
    :returns: an API response tuple
    """
    current_app.logger.info("POSTING WEATHER DATA")
    data_source = get_or_create_user_data_source(current_user)
    weather_measurements = []
    forecasting_jobs = []
    for sensor_group, value_group in zip(generic_asset_name_groups, value_groups):
        for sensor in sensor_group:

            # Parse the entity address
            try:
                ea = parse_entity_address(sensor, entity_type="sensor")
            except EntityAddressException as eae:
                return invalid_domain(str(eae))
            weather_sensor_type_name = ea["weather_sensor_type_name"]
            latitude = ea["latitude"]
            longitude = ea["longitude"]

            # Check whether the unit is valid for this sensor type (e.g. no m/s allowed for temperature data)
            accepted_units = valid_sensor_units(weather_sensor_type_name)
            if unit not in accepted_units:
                return invalid_unit(weather_sensor_type_name, accepted_units)

            weather_sensor = get_weather_sensor_by(
                weather_sensor_type_name, latitude, longitude
            )

            # Create new Weather objects
            for j, value in enumerate(value_group):
                # Each value covers an equal slice of the overall duration.
                dt = start + j * duration / len(value_group)
                if rolling:
                    h = horizon
                else:  # Deduct the difference in end times of the individual timeslot and the timeseries duration
                    h = horizon - (
                        (start + duration) - (dt + duration / len(value_group))
                    )
                w = Weather(
                    datetime=dt,
                    value=value,
                    horizon=h,
                    sensor_id=weather_sensor.id,
                    data_source_id=data_source.id,
                )
                weather_measurements.append(w)

            # make forecasts, but only if the sent-in values are not forecasts themselves (and also not in play)
            if current_app.config.get(
                "FLEXMEASURES_MODE", ""
            ) != "play" and horizon <= timedelta(
                hours=0
            ):  # Todo: replace 0 hours with whatever the moment of switching from ex-ante to ex-post is for this generic asset
                forecasting_jobs.extend(
                    create_forecasting_jobs(
                        "Weather",
                        weather_sensor.id,
                        start,
                        start + duration,
                        resolution=duration / len(value_group),
                        horizons=[horizon],
                        enqueue=False,  # will enqueue later, only if we successfully saved weather measurements
                    )
                )

    # Put these into the database
    current_app.logger.info("SAVING TO DB...")
    try:
        save_to_session(weather_measurements)
        db.session.flush()
        # Enqueue only after a successful flush, so no jobs run on unsaved data.
        [
            current_app.queues["forecasting"].enqueue_job(job)
            for job in forecasting_jobs
        ]
        db.session.commit()
        return request_processed()
    except IntegrityError as e:
        current_app.logger.warning(e)
        db.session.rollback()

        # Allow meter data to be replaced only in play mode
        if current_app.config.get("FLEXMEASURES_MODE", "") == "play":
            save_to_session(weather_measurements, overwrite=True)
            [
                current_app.queues["forecasting"].enqueue_job(job)
                for job in forecasting_jobs
            ]
            db.session.commit()
            return request_processed()
        else:
            return already_received_and_successfully_processed()
def post_price_data_response(  # noqa C901
    unit,
    generic_asset_name_groups,
    horizon,
    prior,
    value_groups,
    start,
    duration,
    resolution,
) -> ResponseTuple:
    """Store posted price data as TimedBeliefs and queue price forecasting jobs.

    Variant using sensor-id entity addresses and timely-beliefs timing
    (horizon or prior) rather than a rolling flag.

    :param unit: unit of the posted values; must equal the sensor's unit
    :param generic_asset_name_groups: groups of market entity addresses
    :param horizon: belief horizon for the posted values (may be None if prior is given)
    :param prior: belief time for the posted values (may be None if horizon is given)
    :param value_groups: groups of price values (one group per market group)
    :param start: start of the posted time series
    :param duration: duration of the posted time series
    :param resolution: resolution of the posted time series
    :returns: an API response tuple (error response on bad input, otherwise
              the result of save_and_enqueue)
    """
    # additional validation, todo: to be moved into Marshmallow
    if horizon is None and prior is None:
        extra_info = "Missing horizon or prior."
        return invalid_horizon(extra_info)

    current_app.logger.info("POSTING PRICE DATA")

    data_source = get_or_create_source(current_user)
    price_df_per_market = []
    forecasting_jobs = []
    for market_group, event_values in zip(generic_asset_name_groups, value_groups):
        for market in market_group:

            # Parse the entity address
            try:
                ea = parse_entity_address(market, entity_type="market")
            except EntityAddressException as eae:
                return invalid_domain(str(eae))
            sensor_id = ea["sensor_id"]

            # Look for the Sensor object
            sensor = Sensor.query.filter(Sensor.id == sensor_id).one_or_none()
            if sensor is None:
                return unrecognized_market(sensor_id)
            elif unit != sensor.unit:
                return invalid_unit("%s prices" % sensor.name, [sensor.unit])

            # Convert to timely-beliefs terminology
            event_starts, belief_horizons = determine_belief_timing(
                event_values, start, resolution, horizon, prior, sensor
            )

            # Create new Price objects
            beliefs = [
                TimedBelief(
                    event_start=event_start,
                    event_value=event_value,
                    belief_horizon=belief_horizon,
                    sensor=sensor,
                    source=data_source,
                )
                for event_start, event_value, belief_horizon in zip(
                    event_starts, event_values, belief_horizons
                )
            ]
            price_df_per_market.append(tb.BeliefsDataFrame(beliefs))

            # Make forecasts, but not in play mode. Price forecasts (horizon>0) can still lead to other price forecasts,
            # by the way, due to things like day-ahead markets.
            if current_app.config.get("FLEXMEASURES_MODE", "") != "play":
                # Forecast 24 and 48 hours ahead for at most the last 24 hours of posted price data
                # NOTE(review): this reassigns (rather than extends) forecasting_jobs
                # on every market iteration — confirm whether jobs for earlier
                # markets are meant to be kept.
                forecasting_jobs = create_forecasting_jobs(
                    sensor.id,
                    max(start, start + duration - timedelta(hours=24)),
                    start + duration,
                    resolution=duration / len(event_values),
                    horizons=[timedelta(hours=24), timedelta(hours=48)],
                    enqueue=False,  # will enqueue later, after saving data
                )

    return save_and_enqueue(price_df_per_market, forecasting_jobs)
def post_weather_data_response(  # noqa: C901
    unit,
    generic_asset_name_groups,
    horizon,
    prior,
    value_groups,
    start,
    duration,
    resolution,
) -> ResponseTuple:
    """Store posted weather data as TimedBeliefs and possibly queue forecasting jobs.

    Variant using timely-beliefs timing (horizon or prior) rather than a
    rolling flag.

    :param unit: unit of the posted values; must be in the accepted units for
                 the addressed weather sensor type
    :param generic_asset_name_groups: groups of weather sensor entity addresses
    :param horizon: belief horizon for the posted values (may be None if prior is given)
    :param prior: belief time for the posted values (may be None if horizon is given)
    :param value_groups: groups of measurement values (one group per sensor group)
    :param start: start of the posted time series
    :param duration: duration of the posted time series
    :param resolution: resolution of the posted time series
    :returns: an API response tuple (error response on bad input, otherwise
              the result of save_and_enqueue)
    """
    # additional validation, todo: to be moved into Marshmallow
    if horizon is None and prior is None:
        extra_info = "Missing horizon or prior."
        return invalid_horizon(extra_info)

    current_app.logger.info("POSTING WEATHER DATA")

    data_source = get_or_create_source(current_user)
    weather_df_per_sensor = []
    forecasting_jobs = []
    for sensor_group, event_values in zip(generic_asset_name_groups, value_groups):
        for sensor in sensor_group:

            # Parse the entity address
            try:
                ea = parse_entity_address(sensor, entity_type="weather_sensor")
            except EntityAddressException as eae:
                return invalid_domain(str(eae))
            weather_sensor_type_name = ea["weather_sensor_type_name"]
            latitude = ea["latitude"]
            longitude = ea["longitude"]

            # Check whether the unit is valid for this sensor type (e.g. no m/s allowed for temperature data)
            accepted_units = valid_sensor_units(weather_sensor_type_name)
            if unit not in accepted_units:
                return invalid_unit(weather_sensor_type_name, accepted_units)

            # Rebinds the loop variable: from entity address string to Sensor object.
            sensor: Sensor = get_sensor_by_generic_asset_type_and_location(
                weather_sensor_type_name, latitude, longitude
            )

            # Convert to timely-beliefs terminology
            event_starts, belief_horizons = determine_belief_timing(
                event_values, start, resolution, horizon, prior, sensor
            )

            # Create new Weather objects
            beliefs = [
                TimedBelief(
                    event_start=event_start,
                    event_value=event_value,
                    belief_horizon=belief_horizon,
                    sensor=sensor,
                    source=data_source,
                )
                for event_start, event_value, belief_horizon in zip(
                    event_starts, event_values, belief_horizons
                )
            ]
            weather_df_per_sensor.append(tb.BeliefsDataFrame(beliefs))

            # make forecasts, but only if the sent-in values are not forecasts themselves (and also not in play)
            if current_app.config.get(
                "FLEXMEASURES_MODE", ""
            ) != "play" and horizon <= timedelta(
                hours=0
            ):  # Todo: replace 0 hours with whatever the moment of switching from ex-ante to ex-post is for this generic asset
                forecasting_jobs.extend(
                    create_forecasting_jobs(
                        sensor.id,
                        start,
                        start + duration,
                        resolution=duration / len(event_values),
                        horizons=[horizon],
                        enqueue=False,  # will enqueue later, after saving data
                    )
                )

    return save_and_enqueue(weather_df_per_sensor, forecasting_jobs)