def add_test_weather_sensor_and_forecasts(db: SQLAlchemy, setup_generic_asset_types):
    """one day of test data (one complete sine curve) for two sensors

    Creates a weather station asset with an irradiance and a wind speed sensor,
    and stores 15-minute TimedBeliefs (belief horizon 6 hours) for each, sourced
    from the "Seita" demo-script data source.
    """
    data_source = DataSource.query.filter_by(
        name="Seita", type="demo script"
    ).one_or_none()
    weather_station = GenericAsset(
        name="Test weather station farther away",
        generic_asset_type=setup_generic_asset_types["weather_station"],
        latitude=100,
        longitude=100,
    )
    # Scale the unit sine curve to a realistic magnitude per sensor.
    # (An earlier version also scaled a "temperature" sensor, but no such sensor
    # is created here, so that branch was dead code and has been removed.)
    scale_factors = {"irradiance": 600, "wind speed": 45}
    for sensor_name, unit in (("irradiance", "kW/m²"), ("wind speed", "m/s")):
        sensor = Sensor(name=sensor_name, generic_asset=weather_station, unit=unit)
        db.session.add(sensor)
        time_slots = pd.date_range(
            datetime(2015, 1, 1), datetime(2015, 1, 2, 23, 45), freq="15T"
        )
        values = [
            random() * (1 + np.sin(x / 15)) * scale_factors[sensor_name]
            for x in range(len(time_slots))
        ]
        for dt, val in zip(time_slots, values):
            db.session.add(
                TimedBelief(
                    sensor=sensor,
                    event_start=as_server_time(dt),
                    event_value=val,
                    belief_horizon=timedelta(hours=6),
                    source=data_source,
                )
            )
def battery_soc_sensor(db: SQLAlchemy, setup_generic_assets):
    """Add a battery SOC sensor."""
    sensor = Sensor(
        unit="%",
        name="state of charge",
        generic_asset=setup_generic_assets["test_battery"],
    )
    db.session.add(sensor)
    return sensor
def add_gas_sensor(db, test_supplier_user):
    """Set up a waste incinerator asset with a gas sensor, owned by the supplier's account."""
    incinerator_type = GenericAssetType(name="waste incinerator")
    db.session.add(incinerator_type)
    db.session.flush()

    incinerator = GenericAsset(
        name="incineration line",
        generic_asset_type=incinerator_type,
        account_id=test_supplier_user.account_id,
    )
    db.session.add(incinerator)
    db.session.flush()

    sensor = Sensor(
        name="some gas sensor",
        unit="m³/h",
        event_resolution=timedelta(minutes=10),
        generic_asset=incinerator,
    )
    db.session.add(sensor)
    sensor.owner = test_supplier_user.account
def add_nearby_weather_sensors(db, add_weather_sensors) -> Dict[str, Sensor]:
    """Add two more weather stations (at increasing longitudinal offsets from the
    existing temperature sensor's location), each with its own temperature sensor.

    Returns the input sensor dict extended with keys "farther_temperature" and
    "even_farther_temperature".
    """
    temp_sensor_location = add_weather_sensors["temperature"].generic_asset.location
    weather_station_type = GenericAssetType.query.filter(
        GenericAssetType.name == "weather station"
    ).one_or_none()
    # The two stations differ only in name and longitude offset,
    # so build them in a loop instead of duplicating the setup code.
    for label, offset in (("farther", 0.1), ("even farther", 0.2)):
        station = GenericAsset(
            name=f"Test weather station {label} away",
            generic_asset_type=weather_station_type,
            latitude=temp_sensor_location[0],
            longitude=temp_sensor_location[1] + offset,
        )
        db.session.add(station)
        temp_sensor = Sensor(
            name="temperature",
            generic_asset=station,
            event_resolution=timedelta(minutes=5),
            unit="°C",
        )
        db.session.add(temp_sensor)
        add_weather_sensors[f"{label.replace(' ', '_')}_temperature"] = temp_sensor
    return add_weather_sensors
def test_closest_sensor(run_as_cli, add_nearby_weather_sensors):
    """Check that the closest temperature sensor to our wind sensor returns
    the one that is on the same spot as the wind sensor itself.
    (That's where we set it up in our conftest.)
    And check that the 2nd and 3rd closest are the farther temperature sensors we set up.
    """
    wind_sensor = add_nearby_weather_sensors["wind"]
    station_type_name = wind_sensor.generic_asset.generic_asset_type.name
    closest_sensors = Sensor.find_closest(
        generic_asset_type_name=station_type_name,
        n=3,
        sensor_name="temperature",
        latitude=wind_sensor.generic_asset.latitude,
        longitude=wind_sensor.generic_asset.longitude,
    )
    # Nearest sensor shares the wind sensor's location; the next two are the
    # farther stations, in order of distance.
    assert closest_sensors[0].location == wind_sensor.generic_asset.location
    assert closest_sensors[1] == add_nearby_weather_sensors["farther_temperature"]
    assert closest_sensors[2] == add_nearby_weather_sensors["even_farther_temperature"]
    # All results belong to the same generic asset type as the wind sensor's station.
    for sensor in closest_sensors:
        assert sensor.generic_asset.generic_asset_type.name == station_type_name
def get_weather_data(
    assets: List[Asset],
    metrics: dict,
    sensor_type: WeatherSensorType,
    query_window: Tuple[datetime, datetime],
    resolution: str,
    forecast_horizon: timedelta,
) -> Tuple[pd.DataFrame, pd.DataFrame, str, Sensor, dict]:
    """Get most recent weather data and forecast weather data for the requested forecast horizon.

    Return weather observations, weather forecasts (either might be an empty DataFrame),
    the name of the sensor type, the weather sensor and a dict with the following metrics:
    - expected value
    - mean absolute error
    - mean absolute percentage error
    - weighted absolute percentage error"""
    # Todo: for now we only collect weather data for a single asset
    asset = assets[0]
    # Defaults returned when no sensor type is given or no sensor has data.
    weather_data = tb.BeliefsDataFrame(columns=["event_value"])
    weather_forecast_data = tb.BeliefsDataFrame(columns=["event_value"])
    sensor_type_name = ""
    closest_sensor = None
    if sensor_type:
        # Find the 50 closest weather sensors
        sensor_type_name = sensor_type.name
        closest_sensors = Sensor.find_closest(
            generic_asset_type_name=asset.generic_asset.generic_asset_type.name,
            sensor_name=sensor_type_name,
            n=50,
            object=asset,
        )
        if closest_sensors:
            closest_sensor = closest_sensors[0]
            # Collect the weather data for the requested time window
            sensor_names = [sensor.name for sensor in closest_sensors]
            # Get weather data (observations: beliefs with horizon <= 0)
            weather_bdf_dict: Dict[str, tb.BeliefsDataFrame] = TimedBelief.search(
                sensor_names,
                event_starts_after=query_window[0],
                event_ends_before=query_window[1],
                resolution=resolution,
                horizons_at_least=None,
                horizons_at_most=timedelta(hours=0),
                sum_multiple=False,
            )
            weather_df_dict: Dict[str, pd.DataFrame] = {}
            for sensor_name in weather_bdf_dict:
                weather_df_dict[sensor_name] = simplify_index(
                    weather_bdf_dict[sensor_name],
                    index_levels_to_columns=["belief_horizon", "source"],
                )
            # Get weather forecasts (beliefs with horizon >= the requested forecast horizon)
            weather_forecast_bdf_dict: Dict[str, tb.BeliefsDataFrame] = TimedBelief.search(
                sensor_names,
                event_starts_after=query_window[0],
                event_ends_before=query_window[1],
                resolution=resolution,
                horizons_at_least=forecast_horizon,
                horizons_at_most=None,
                source_types=["user", "forecasting script", "script"],
                sum_multiple=False,
            )
            weather_forecast_df_dict: Dict[str, pd.DataFrame] = {}
            for sensor_name in weather_forecast_bdf_dict:
                weather_forecast_df_dict[sensor_name] = simplify_index(
                    weather_forecast_bdf_dict[sensor_name],
                    index_levels_to_columns=["belief_horizon", "source"],
                )
            # Take the closest weather sensor which contains some data for the selected time window
            # NOTE(review): if no sensor has any data, the loop variable sensor_name
            # ends on the farthest sensor, whose (all-NaN) frames are used below.
            for sensor, sensor_name in zip(closest_sensors, sensor_names):
                if (
                    not weather_df_dict[sensor_name]["event_value"].isnull().values.all()
                    or not weather_forecast_df_dict[sensor_name]["event_value"].isnull().values.all()
                ):
                    closest_sensor = sensor
                    break
            weather_data = weather_df_dict[sensor_name]
            weather_forecast_data = weather_forecast_df_dict[sensor_name]
            # Calculate the weather metrics
            if not weather_data.empty:
                metrics["realised_weather"] = weather_data["event_value"].mean()
            else:
                metrics["realised_weather"] = np.NaN
            # Error metrics only make sense when forecasts align 1:1 with observations.
            if (
                not weather_forecast_data.empty
                and weather_forecast_data.size == weather_data.size
            ):
                metrics["expected_weather"] = weather_forecast_data["event_value"].mean()
                metrics["mae_weather"] = calculations.mean_absolute_error(
                    weather_data["event_value"], weather_forecast_data["event_value"]
                )
                metrics["mape_weather"] = calculations.mean_absolute_percentage_error(
                    weather_data["event_value"], weather_forecast_data["event_value"]
                )
                metrics["wape_weather"] = calculations.weighted_absolute_percentage_error(
                    weather_data["event_value"], weather_forecast_data["event_value"]
                )
            else:
                metrics["expected_weather"] = np.NaN
                metrics["mae_weather"] = np.NaN
                metrics["mape_weather"] = np.NaN
                metrics["wape_weather"] = np.NaN
    return (
        weather_data,
        weather_forecast_data,
        sensor_type_name,
        closest_sensor,
        metrics,
    )
def get_schedule(self, sensor: Sensor, job_id: str, duration: timedelta, **kwargs):
    """Get a schedule from FlexMeasures.

    .. :quickref: Schedule; Download schedule from the platform

    **Optional fields**

    - "duration" (6 hours by default; can be increased to plan further into the future)

    **Example response**

    This message contains a schedule indicating to consume at various power
    rates from 10am UTC onwards for a duration of 45 minutes.

    .. sourcecode:: json

        {
            "values": [
                2.15,
                3,
                2
            ],
            "start": "2015-06-02T10:00:00+00:00",
            "duration": "PT45M",
            "unit": "MW"
        }

    :reqheader Authorization: The authentication token
    :reqheader Content-Type: application/json
    :resheader Content-Type: application/json
    :status 200: PROCESSED
    :status 400: INVALID_TIMEZONE, INVALID_DOMAIN, INVALID_UNIT, UNKNOWN_SCHEDULE, UNRECOGNIZED_CONNECTION_GROUP
    :status 401: UNAUTHORIZED
    :status 403: INVALID_SENDER
    :status 405: INVALID_METHOD
    :status 422: UNPROCESSABLE_ENTITY
    """
    # Cap the requested duration at the platform-wide planning horizon.
    planning_horizon = min(  # type: ignore
        duration, current_app.config.get("FLEXMEASURES_PLANNING_HORIZON")
    )

    # Look up the scheduling job
    connection = current_app.queues["scheduling"].connection
    try:  # First try the scheduling queue
        job = Job.fetch(job_id, connection=connection)
    except NoSuchJobError:
        return unrecognized_event(job_id, "job")
    if job.is_finished:
        # Only the finished branch falls through to the lookups below;
        # every other branch returns, so error_message is always bound when used.
        error_message = "A scheduling job has been processed with your job ID, but "
    elif job.is_failed:  # Try to inform the user on why the job failed
        e = job.meta.get(
            "exception",
            Exception(
                "The job does not state why it failed. "
                "The worker may be missing an exception handler, "
                "or its exception handler is not storing the exception as job meta data."
            ),
        )
        return unknown_schedule(
            f"Scheduling job failed with {type(e).__name__}: {e}"
        )
    elif job.is_started:
        return unknown_schedule("Scheduling job in progress.")
    elif job.is_queued:
        return unknown_schedule("Scheduling job waiting to be processed.")
    elif job.is_deferred:
        try:
            preferred_job = job.dependency
        except NoSuchJobError:
            return unknown_schedule(
                "Scheduling job waiting for unknown job to be processed."
            )
        return unknown_schedule(
            f'Scheduling job waiting for {preferred_job.status} job "{preferred_job.id}" to be processed.'
        )
    else:
        return unknown_schedule("Scheduling job has an unknown status.")
    schedule_start = job.kwargs["start"]

    # Look up the stored schedule by its (scheduling script) data source.
    schedule_data_source_name = "Seita"
    scheduler_source = DataSource.query.filter_by(
        name="Seita", type="scheduling script"
    ).one_or_none()
    if scheduler_source is None:
        return unknown_schedule(
            error_message + f'no data is known from "{schedule_data_source_name}".'
        )

    power_values = sensor.search_beliefs(
        event_starts_after=schedule_start,
        event_ends_before=schedule_start + planning_horizon,
        source=scheduler_source,
        most_recent_beliefs_only=True,
        one_deterministic_belief_per_event=True,
    )
    # For consumption schedules, positive values denote consumption. For the db, consumption is negative
    consumption_schedule = -simplify_index(power_values)["event_value"]
    if consumption_schedule.empty:
        return unknown_schedule(
            error_message + "the schedule was not found in the database."
        )

    # Update the planning window: trim the duration to what the schedule actually covers.
    resolution = sensor.event_resolution
    start = consumption_schedule.index[0]
    duration = min(duration, consumption_schedule.index[-1] + resolution - start)
    consumption_schedule = consumption_schedule[
        start : start + duration - resolution
    ]
    response = dict(
        values=consumption_schedule.tolist(),
        start=isodate.datetime_isoformat(start),
        duration=isodate.duration_isoformat(duration),
        unit=sensor.unit,
    )
    d, s = request_processed()
    return dict(**response, **d), s
def upgrade_data():
    """Data migration to update the ids of old sensors.

    Assets keep their ids; market ids are shifted past the asset id range,
    and weather sensor ids past both. A matching Sensor row is created for
    each old asset, market and weather sensor, reusing the shifted id.
    """
    # To support data upgrade, cascade upon updating ids
    recreate_sensor_fks(recreate_with_cascade_on_update=True)

    # Declare ORM table views (only the columns this migration touches)
    t_assets = sa.Table(
        "asset",
        sa.MetaData(),
        sa.Column("id", sa.Integer),
        sa.Column("name", sa.String(80)),
    )
    t_markets = sa.Table(
        "market",
        sa.MetaData(),
        sa.Column("id", sa.Integer),
        sa.Column("name", sa.String(80)),
    )
    t_weather_sensors = sa.Table(
        "weather_sensor",
        sa.MetaData(),
        sa.Column("id", sa.Integer),
        sa.Column("name", sa.String(80)),
    )

    # Use SQLAlchemy's connection and transaction to go through the data
    connection = op.get_bind()

    # Get the max id used by assets and markets
    max_asset_id = get_max_id(connection, "asset")
    max_market_id = get_max_id(connection, "market")
    max_weather_sensor_id = get_max_id(connection, "weather_sensor")

    # Select all existing ids that need migrating, while keeping names intact
    asset_results = connection.execute(
        sa.select(
            [
                t_assets.c.id,
                t_assets.c.name,
            ]
        )
    ).fetchall()
    market_results = connection.execute(
        sa.select(
            [
                t_markets.c.id,
                t_markets.c.name,
            ]
        )
    ).fetchall()
    weather_sensor_results = connection.execute(
        sa.select(
            [
                t_weather_sensors.c.id,
                t_weather_sensors.c.name,
            ]
        )
    ).fetchall()

    # Prepare to build a list of new sensors
    new_sensors = []

    # Iterate over all assets
    for id_, name in asset_results:
        # Determine the new id
        new_id = id_  # assets keep their original ids
        # Create new Sensors with matching ids
        new_sensor = Sensor(name=name)
        new_sensor.id = new_id
        new_sensors.append(new_sensor)

    # Iterate over all markets
    for id_, name in market_results:
        # Determine the new id (shift past the asset id range)
        new_id = id_ + max_asset_id
        # Update the id
        connection.execute(
            t_markets.update().where(t_markets.c.name == name).values(id=new_id)
        )
        # Create new Sensors with matching ids
        new_sensor = Sensor(name=name)
        new_sensor.id = new_id
        new_sensors.append(new_sensor)

    # Iterate over all weather sensors
    for id_, name in weather_sensor_results:
        # Determine the new id (shift past both the asset and market id ranges)
        new_id = id_ + max_asset_id + max_market_id
        # Update the id
        connection.execute(
            t_weather_sensors.update()
            .where(t_weather_sensors.c.name == name)
            .values(id=new_id)
        )
        # Create new Sensors with matching ids
        new_sensor = Sensor(name=name)
        new_sensor.id = new_id
        new_sensors.append(new_sensor)

    # Add the new sensors
    session = orm.Session(bind=connection)
    session.add_all(new_sensors)
    session.commit()

    # After supporting data upgrade, stop cascading upon updating ids
    recreate_sensor_fks(recreate_with_cascade_on_update=False)

    # Finally, help out the autoincrement of the Sensor table
    t_sensors = sa.Table(
        "sensor",
        sa.MetaData(),
        sa.Column("id", sa.Integer),
    )
    sequence_name = "%s_id_seq" % t_sensors.name
    # Set next id for table seq to just after max id of all old sensors combined
    # (values interpolated here are internally computed integers, not user input)
    connection.execute(
        "SELECT setval('%s', %s, true);"
        % (
            sequence_name,
            max_asset_id + max_market_id + max_weather_sensor_id + 1,
        )
    )
def __init__(self, **kwargs): super(WeatherSensor, self).__init__(**kwargs) # Create a new Sensor with unique id across assets, markets and weather sensors if "id" not in kwargs: weather_sensor_type = get_old_model_type( kwargs, WeatherSensorType, "weather_sensor_type_name", "sensor_type", # NB not "weather_sensor_type" (slight inconsistency in this old sensor class) ) generic_asset_kwargs = { **kwargs, **copy_old_sensor_attributes( self, old_sensor_type_attributes=[], old_sensor_attributes=[ "display_name", ], old_sensor_type=weather_sensor_type, ), } new_generic_asset = create_generic_asset( "weather_sensor", **generic_asset_kwargs ) new_sensor = Sensor( name=kwargs["name"], generic_asset=new_generic_asset, **copy_old_sensor_attributes( self, old_sensor_type_attributes=[ "daily_seasonality", "weekly_seasonality", "yearly_seasonality", ], old_sensor_attributes=[ "display_name", ], old_sensor_type=weather_sensor_type, ), ) db.session.add(new_sensor) db.session.flush() # generates the pkey for new_sensor new_sensor_id = new_sensor.id else: # The UI may initialize WeatherSensor objects from API form data with a known id new_sensor_id = kwargs["id"] self.id = new_sensor_id # Copy over additional columns from (newly created) WeatherSensor to (newly created) Sensor if "id" not in kwargs: db.session.add(self) db.session.flush() # make sure to generate each column for the old sensor new_sensor.unit = self.unit new_sensor.event_resolution = self.event_resolution new_sensor.knowledge_horizon_fnc = self.knowledge_horizon_fnc new_sensor.knowledge_horizon_par = self.knowledge_horizon_par
def __init__(self, **kwargs):
    """Initialize an old-style Market, mirroring it as a new-style Sensor.

    Normalizes the market name, sets ex-ante knowledge-horizon defaults, and —
    when no id is given — creates a GenericAsset and a Sensor so that the old
    Market shares its id with the new Sensor.
    """
    # Set default knowledge horizon function for an economic sensor
    if "knowledge_horizon_fnc" not in kwargs:
        kwargs["knowledge_horizon_fnc"] = knowledge_horizons.ex_ante.__name__
    if "knowledge_horizon_par" not in kwargs:
        # Key the parameter dict by the function's second argument name.
        kwargs["knowledge_horizon_par"] = {
            knowledge_horizons.ex_ante.__code__.co_varnames[1]: "PT0H"
        }
    # Normalize the name to snake_case.
    kwargs["name"] = kwargs["name"].replace(" ", "_").lower()
    if "display_name" not in kwargs:
        kwargs["display_name"] = humanize(kwargs["name"])
    super(Market, self).__init__(**kwargs)
    # Create a new Sensor with unique id across assets, markets and weather sensors
    if "id" not in kwargs:
        market_type = get_old_model_type(
            kwargs, MarketType, "market_type_name", "market_type"
        )
        generic_asset_kwargs = {
            **kwargs,
            **copy_old_sensor_attributes(
                self,
                old_sensor_type_attributes=[],
                old_sensor_attributes=[
                    "display_name",
                ],
                old_sensor_type=market_type,
            ),
        }
        new_generic_asset = create_generic_asset("market", **generic_asset_kwargs)
        new_sensor = Sensor(
            name=kwargs["name"],
            generic_asset=new_generic_asset,
            **copy_old_sensor_attributes(
                self,
                old_sensor_type_attributes=[
                    "daily_seasonality",
                    "weekly_seasonality",
                    "yearly_seasonality",
                ],
                old_sensor_attributes=[
                    "display_name",
                ],
                old_sensor_type=market_type,
            ),
        )
        db.session.add(new_sensor)
        db.session.flush()  # generates the pkey for new_sensor
        new_sensor_id = new_sensor.id
    else:
        # The UI may initialize Market objects from API form data with a known id
        new_sensor_id = kwargs["id"]
    # Old and new sensor share the same id.
    self.id = new_sensor_id
    # Copy over additional columns from (newly created) Market to (newly created) Sensor
    if "id" not in kwargs:
        db.session.add(self)
        db.session.flush()  # make sure to generate each column for the old sensor
        new_sensor.unit = self.unit
        new_sensor.event_resolution = self.event_resolution
        new_sensor.knowledge_horizon_fnc = self.knowledge_horizon_fnc
        new_sensor.knowledge_horizon_par = self.knowledge_horizon_par
def schedule_charging_station(
    sensor: Sensor,
    start: datetime,
    end: datetime,
    resolution: timedelta,
    soc_at_start: float,
    soc_targets: pd.Series,
    soc_min: Optional[float] = None,
    soc_max: Optional[float] = None,
    roundtrip_efficiency: Optional[float] = None,
    prefer_charging_sooner: bool = True,
    price_sensor: Optional[Sensor] = None,
    round_to_decimals: Optional[int] = 6,
) -> Union[pd.Series, None]:
    """Schedule a charging station asset based directly on the latest beliefs regarding
    market prices within the specified time window.
    For the resulting consumption schedule, consumption is defined as positive values.

    Todo: handle uni-directional charging by setting the "min" or "derivative min" constraint to 0
    """
    # Check for required Sensor attributes
    sensor.check_required_attributes([("capacity_in_mw", (float, int))])

    # Check for round-trip efficiency
    if roundtrip_efficiency is None:
        # Get default from sensor, or use 100% otherwise
        roundtrip_efficiency = sensor.get_attribute("roundtrip_efficiency", 1)
    if roundtrip_efficiency <= 0 or roundtrip_efficiency > 1:
        raise ValueError(
            "roundtrip_efficiency expected within the interval (0, 1]"
        )

    # Check for min and max SOC, or get default from sensor
    if soc_min is None:
        # Can't drain the EV battery by more than it contains
        soc_min = sensor.get_attribute("min_soc_in_mwh", 0)
    if soc_max is None:
        # Lacking information about the battery's nominal capacity, we use the highest target value as the maximum state of charge
        soc_max = sensor.get_attribute("max_soc_in_mwh", max(soc_targets.values))

    # Check for known prices or price forecasts, trimming planning window accordingly
    prices, (start, end) = get_prices(
        (start, end),
        resolution,
        price_sensor=price_sensor,
        sensor=sensor,
        allow_trimmed_query_window=True,
    )
    # soc targets are at the end of each time slot, while prices are indexed by the start of each time slot
    soc_targets = soc_targets.tz_convert("UTC")
    start = pd.Timestamp(start).tz_convert("UTC")
    end = pd.Timestamp(end).tz_convert("UTC")
    soc_targets = soc_targets[start + resolution : end]

    # Add tiny price slope to prefer charging now rather than later, and discharging later rather than now.
    # We penalise the future with at most 1 per thousand times the price spread.
    if prefer_charging_sooner:
        prices = add_tiny_price_slope(prices, "event_value")

    # Set up commitments to optimise for
    commitment_quantities = [initialize_series(0, start, end, resolution)]

    # Todo: convert to EUR/(deviation of commitment, which is in MW)
    commitment_upwards_deviation_price = [
        prices.loc[start : end - resolution]["event_value"]
    ]
    commitment_downwards_deviation_price = [
        prices.loc[start : end - resolution]["event_value"]
    ]

    # Set up device constraints (only one device for this EMS)
    columns = [
        "equals",
        "max",
        "min",
        "derivative equals",
        "derivative max",
        "derivative min",
    ]
    device_constraints = [initialize_df(columns, start, end, resolution)]
    # Constraints are expressed in MW, hence the (1 hour / resolution) conversion
    # of energy (MWh) quantities below.
    device_constraints[0]["equals"] = soc_targets.shift(
        -1, freq=resolution
    ).values * (timedelta(hours=1) / resolution) - soc_at_start * (
        timedelta(hours=1) / resolution
    )  # shift "equals" constraint for target SOC by one resolution (the target defines a state at a certain time,
    # while the "equals" constraint defines what the total stock should be at the end of a time slot,
    # where the time slot is indexed by its starting time)
    device_constraints[0]["min"] = (soc_min - soc_at_start) * (
        timedelta(hours=1) / resolution
    )
    device_constraints[0]["max"] = (soc_max - soc_at_start) * (
        timedelta(hours=1) / resolution
    )
    # Power bounds: zero on the restricted side for uni-directional devices.
    if sensor.get_attribute("is_strictly_non_positive"):
        device_constraints[0]["derivative min"] = 0
    else:
        device_constraints[0]["derivative min"] = (
            sensor.get_attribute("capacity_in_mw") * -1
        )
    if sensor.get_attribute("is_strictly_non_negative"):
        device_constraints[0]["derivative max"] = 0
    else:
        device_constraints[0]["derivative max"] = sensor.get_attribute(
            "capacity_in_mw"
        )

    # Apply round-trip efficiency evenly to charging and discharging
    device_constraints[0]["derivative down efficiency"] = roundtrip_efficiency**0.5
    device_constraints[0]["derivative up efficiency"] = roundtrip_efficiency**0.5

    # Set up EMS constraints (no additional constraints)
    columns = ["derivative max", "derivative min"]
    ems_constraints = initialize_df(columns, start, end, resolution)

    ems_schedule, expected_costs, scheduler_results = device_scheduler(
        device_constraints,
        ems_constraints,
        commitment_quantities,
        commitment_downwards_deviation_price,
        commitment_upwards_deviation_price,
    )
    if scheduler_results.solver.termination_condition == "infeasible":
        # Fallback policy if the problem was unsolvable
        charging_station_schedule = fallback_charging_policy(
            sensor, device_constraints[0], start, end, resolution
        )
    else:
        charging_station_schedule = ems_schedule[0]

    # Round schedule
    if round_to_decimals:
        charging_station_schedule = charging_station_schedule.round(round_to_decimals)

    return charging_station_schedule
def __init__(self, **kwargs):
    """Initialize an old-style Asset, mirroring it as a new-style Sensor.

    When no id is given, a GenericAsset (carrying ownership) and a Sensor are
    created so that the old Asset shares its id with the new Sensor.
    """
    if "unit" not in kwargs:
        kwargs["unit"] = "MW"  # current default
    super(Asset, self).__init__(**kwargs)

    # Create a new Sensor with unique id across assets, markets and weather sensors
    # Also keep track of ownership by creating a GenericAsset and assigning the new Sensor to it.
    if "id" not in kwargs:
        asset_type = get_old_model_type(
            kwargs, AssetType, "asset_type_name", "asset_type"
        )

        # Set up generic asset
        generic_asset_kwargs = {
            **kwargs,
            **copy_old_sensor_attributes(
                self,
                old_sensor_type_attributes=[
                    "can_curtail",
                    "can_shift",
                ],
                old_sensor_attributes=[
                    "display_name",
                    "min_soc_in_mwh",
                    "max_soc_in_mwh",
                    "soc_in_mwh",
                    "soc_datetime",
                    "soc_udi_event_id",
                ],
                old_sensor_type=asset_type,
            ),
        }
        if "owner_id" in kwargs:
            owner = User.query.get(kwargs["owner_id"])
            if owner:
                # Carry the owner's account over to the generic asset.
                generic_asset_kwargs.update(account_id=owner.account_id)
        new_generic_asset = create_generic_asset("asset", **generic_asset_kwargs)

        # Set up sensor
        new_sensor = Sensor(
            name=kwargs["name"],
            generic_asset=new_generic_asset,
            **copy_old_sensor_attributes(
                self,
                old_sensor_type_attributes=[
                    "is_consumer",
                    "is_producer",
                    "daily_seasonality",
                    "weekly_seasonality",
                    "yearly_seasonality",
                    "weather_correlations",
                ],
                old_sensor_attributes=[
                    "display_name",
                    "capacity_in_mw",
                    "market_id",
                ],
                old_sensor_type=asset_type,
            ),
        )
        db.session.add(new_sensor)
        db.session.flush()  # generates the pkey for new_sensor
        sensor_id = new_sensor.id
    else:
        # The UI may initialize Asset objects from API form data with a known id
        sensor_id = kwargs["id"]
    # Old asset and new sensor share the same id.
    self.id = sensor_id
    if self.unit != "MW":
        raise Exception("FlexMeasures only supports MW as unit for now.")
    self.name = self.name.replace(" (MW)", "")
    if "display_name" not in kwargs:
        self.display_name = humanize(self.name)
    # Copy over additional columns from (newly created) Asset to (newly created) Sensor
    if "id" not in kwargs:
        db.session.add(self)
        db.session.flush()  # make sure to generate each column for the old sensor
        new_sensor.unit = self.unit
        new_sensor.event_resolution = self.event_resolution
        new_sensor.knowledge_horizon_fnc = self.knowledge_horizon_fnc
        new_sensor.knowledge_horizon_par = self.knowledge_horizon_par
def get_latest_power_as_plot(sensor: Sensor, small: bool = False) -> Tuple[str, str]:
    """
    Create a plot of a sensor's latest power measurement as an embeddable html string (incl. javascript).
    First returned string is the measurement time, second string is the html string.

    Assumes that the sensor has the "capacity_in_mw" attribute.

    TODO: move to Altair.
    """
    if current_app.config.get("FLEXMEASURES_MODE", "") == "play":
        before = None  # type:ignore
    else:
        before = server_now()
        _, before = convert_query_window_for_demo((before, before))

    latest_power = sensor.latest_state()
    if not latest_power.empty:
        latest_power_value = latest_power["event_value"].values[0]
        if current_app.config.get("FLEXMEASURES_MODE", "") == "demo":
            # In demo mode, pretend the measurement happened this year.
            latest_power_datetime = (
                latest_power.event_ends[0]
                .to_pydatetime()
                .replace(year=datetime.now().year)
            )
        else:
            latest_power_datetime = latest_power.event_ends[0].to_pydatetime()
        latest_measurement_time_str = localized_datetime_str(
            latest_power_datetime + sensor.event_resolution
        )
    else:
        latest_power_value = 0
        latest_measurement_time_str = "time unknown"
    # In the db, consumption is stored as negative power; flip the sign for display.
    if latest_power_value < 0:
        consumption = True
        latest_power_value *= -1
    else:
        consumption = False
    capacity_in_mw = sensor.get_attribute("capacity_in_mw", latest_power_value)
    data = {
        latest_measurement_time_str if not small else "": [0],
        "Capacity in use": [latest_power_value],
        "Remaining capacity": [capacity_in_mw - latest_power_value],
    }
    if capacity_in_mw > 0:
        percentage_capacity = latest_power_value / capacity_in_mw
    else:
        percentage_capacity = 0
    df = pd.DataFrame(data)
    p = df.plot_bokeh(
        kind="bar",
        x=latest_measurement_time_str if not small else "",
        y=["Capacity in use", "Remaining capacity"],
        stacked=True,
        colormap=[
            "%s"
            % Color(
                hue=0.3 * min(1.0, 3 / 2 * percentage_capacity),
                saturation=1,
                luminance=min(0.5, 1 - percentage_capacity * 3 / 4),
            ).get_hex_l(),  # 0% red, 38% yellow, 67% green, >67% darker green
            "#f7ebe7",
        ],
        alpha=0.7,
        title=None,
        xlabel=None,
        ylabel="Power (%s)" % sensor.unit,
        zooming=False,
        show_figure=False,
        hovertool=None,
        legend=None,
        toolbar_location=None,
        figsize=(200, 400) if not small else (100, 100),
        ylim=(0, capacity_in_mw),
        xlim=(-0.5, 0.5),
    )
    p.xgrid.visible = False
    # Widen the bar glyphs to fill the plot.
    for r in p.renderers:
        try:
            r.glyph.width = 1
        except AttributeError:
            pass
    p.xaxis.ticker = []
    p.add_layout(BoxAnnotation(bottom=0, top=capacity_in_mw, fill_color="#f7ebe7"))
    plot_html_str = pandas_bokeh.embedded_html(p)
    hover_tool_str = "%s at %s %s (%s%% capacity).\nLatest state at %s." % (
        "Consuming"
        if consumption
        else "Running"
        if latest_power_value == 0
        else "Producing",
        round(latest_power_value, 3),
        sensor.unit,
        round(100 * percentage_capacity),
        latest_measurement_time_str,
    )
    return (
        latest_measurement_time_str,
        """<div data-toggle="tooltip" data-placement="bottom" title="%s">%s</div>"""
        % (hover_tool_str, plot_html_str),
    )