class PVForecastPublisher(DataPublisher):
    def __init__(self, internal_topic_params, config, id, control_frequency, horizon_in_steps,
                 dT_in_seconds, q):
        self.logger = MessageLogger.get_logger(__name__, id)
        self.pv_data = {}
        self.q = q
        self.control_frequency = control_frequency
        self.horizon_in_steps = horizon_in_steps
        self.dT_in_seconds = dT_in_seconds
        self.topic = "P_PV"
        self.redisDB = RedisDB()
        self.id = id
        try:
            super().__init__(True, internal_topic_params, config, control_frequency, id)
        except Exception as e:
            self.redisDB.set("Error mqtt" + self.id, True)
            self.logger.error(e)

    def get_data(self):
        # check if new data is available
        if not self.redisDB.get_bool(Constants.get_data_flow_key(self.id)):
            return None
        self.logger.debug("Getting PV data from Queue")
        if not self.q.empty():
            try:
                new_data = self.q.get_nowait()
                self.logger.debug("new data " + str(new_data))
                self.q.task_done()
                self.pv_data = new_data
                self.logger.debug("extract pv data")
                data = self.convert_to_senml()
                return data
            except Exception as e:
                self.logger.error("error reading PV data from queue " + str(e))
        else:
            self.logger.debug("PV Queue empty")
        return None

    def convert_to_senml(self):
        meas = []
        if len(self.pv_data) > 0:
            for row in self.pv_data:
                meas.append(self.get_senml_meas(float(row[1]), row[0]))
        doc = senml.SenMLDocument(meas)
        val = doc.to_json()
        return json.dumps(val)

    def get_senml_meas(self, value, time):
        if not isinstance(time, float):
            time = float(time.timestamp())
        meas = senml.SenMLMeasurement()
        meas.time = time
        meas.value = value
        meas.name = self.topic
        return meas
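
# Illustrative sketch (not part of the publisher): shows the SenML payload shape that
# convert_to_senml() above produces for a list of [timestamp, value] rows. The sample
# rows and the helper name are hypothetical; it assumes the same `senml` package and
# `json` module already used by the class.
def _example_pv_rows_to_senml(rows, topic="P_PV"):
    measurements = []
    for timestamp, value in rows:
        m = senml.SenMLMeasurement()
        m.time = float(timestamp)
        m.value = float(value)
        m.name = topic
        measurements.append(m)
    doc = senml.SenMLDocument(measurements)
    # to_json() returns a JSON-serializable structure; the publisher dumps it to a string
    return json.dumps(doc.to_json())

# e.g. _example_pv_rows_to_senml([[1546300800, 0.0], [1546304400, 512.3]])
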
class ControllerBase(ABC, threading.Thread):
    def __init__(self, id, solver_name, model_path, control_frequency, repetition, output_config,
                 input_config_parser, config, horizon_in_steps, dT_in_seconds, optimization_type):
        super().__init__()
        pyomo_path = "/usr/src/app/logs/pyomo_" + str(id)
        if not os.path.exists(pyomo_path):
            os.makedirs(pyomo_path, mode=0o777, exist_ok=False)
            os.chmod(pyomo_path, 0o777)
        TempfileManager.tempdir = pyomo_path
        self.logger = MessageLogger.get_logger(__name__, id)
        self.logger.info("Initializing optimization controller " + id)
        self.id = id
        self.results = ""
        self.model_path = model_path
        self.solver_name = solver_name
        self.control_frequency = control_frequency
        self.repetition = repetition
        self.horizon_in_steps = horizon_in_steps
        self.dT_in_seconds = dT_in_seconds
        self.output_config = output_config
        self.input_config_parser = input_config_parser
        self.stopRequest = None  # threading.Event()
        self.redisDB = RedisDB()
        self.lock_key = "id_lock"
        self.optimization_type = optimization_type
        self.stop_signal_key = "opt_stop_" + self.id
        self.finish_status_key = "finish_status_" + self.id
        self.redisDB.set(self.stop_signal_key, False)
        self.redisDB.set(self.finish_status_key, False)
        self.repetition_completed = False
        self.preprocess = False
        self.input = None
        self.output = None
        if "False" in self.redisDB.get("Error mqtt" + self.id):
            self.output = OutputController(self.id, self.output_config)
        if "False" in self.redisDB.get("Error mqtt" + self.id):
            self.input = InputController(self.id, self.input_config_parser, config,
                                         self.control_frequency, self.horizon_in_steps,
                                         self.dT_in_seconds)
        """try:
            # dynamic load of a class
            self.logger.info("This is the model path: " + self.model_path)
            module = self.path_import2(self.model_path)
            self.logger.info(getattr(module, 'Model'))
            self.my_class = getattr(module, 'Model')
        except Exception as e:
            self.logger.error(e)
            raise InvalidModelException("model is invalid/contains python syntax errors")"""

    # Importing a class dynamically
    def path_import2(self, absolute_path):
        spec = importlib.util.spec_from_file_location(absolute_path, absolute_path)
        module = spec.loader.load_module(spec.name)
        return module

    def join(self, timeout=None):
        # self.stopRequest.set()
        super(ControllerBase, self).join(timeout)

    def Stop(self):
        try:
            if self.input:
                self.input.Stop()
                self.logger.debug("Deleting input instances")
                # del self.input.inputPreprocess
                # del self.input
        except Exception as e:
            self.logger.error("error stopping input " + str(e))
        try:
            if self.output:
                self.output.Stop()
                self.logger.debug("Deleting output instances")
                # del self.output
        except Exception as e:
            self.logger.error("error stopping output " + str(e))
        # erasing files from pyomo
        # self.erase_pyomo_files()
        self.logger.debug("setting stop_signal_key")
        self.redisDB.set(self.stop_signal_key, True)
        if self.isAlive():
            self.join(1)

    def initialize_opt_solver(self):
        start_time_total = time.time()
        self.optsolver = SolverFactory(self.solver_name)
        # , tee=False, keepfiles=False, verbose=False, load_solutions=False)  # , solver_io="lp")
        # self.optsolver.verbose = False
        # self.optsolver.load_solutions = False
        self.logger.debug("Solver factory: " + str(self.optsolver))
        # self.optsolver.options.tee = False
        # self.optsolver.options.keepfiles = False
        # self.optsolver.options.load_solutions = False
        # optsolver.options["max_iter"] = 5000
        self.logger.info("solver instantiated with " + self.solver_name)
        # return self.optsolver

    def initialize_solver_manager(self):
        # create a solver manager
        self.solver_manager = None
        # self.solver_manager = SolverManagerFactory('pyro', host='localhost')
        self.logger.debug("Starting the solver_manager")
        # return self.solver_manager
        # optsolver.options.pyro_shutdown = True

    def erase_pyomo_files(self, folder):
        # erasing files from pyomo
        # folder = "/usr/src/app/logs/pyomo"
        for the_file in os.listdir(folder):
            file_path = os.path.join(folder, the_file)
            try:
                if os.path.isfile(file_path):
                    os.unlink(file_path)
                # elif os.path.isdir(file_path): shutil.rmtree(file_path)
            except Exception as e:
                self.logger.error(e)

    # Starts the optimization process and gives back a result
    def run(self):
        self.logger.info("Starting optimization controller")
        return_msg = "success"
        execution_error = False
        try:
            count = 0
            self.optimize(count, self.solver_name, self.model_path)
        except Exception as e:
            execution_error = True
            self.logger.error("error overall " + str(e))
            e = str(e)
            solver_error = "The SolverFactory was unable to create the solver"
            if solver_error in e:
                i = e.index(solver_error)
                i_start = e.index("\"", i)
                i_end = e.index("\"", i_start + 1)
                solver = e[i_start + 1:i_end]
                return_msg = "Incorrect solver " + str(solver) + " used"
            else:
                return_msg = e
        finally:
            self.logger.info("repetition completed " + str(self.repetition_completed))
            self.logger.info("stop request " + str(self.redisDB.get_bool(self.stop_signal_key)))
            self.logger.info("execution error " + str(execution_error))
            if not self.redisDB.get_bool(self.stop_signal_key) and not self.repetition_completed \
                    and not execution_error:
                self.logger.error("Process interrupted")
                self.redisDB.set("kill_signal", True)
            # erase pyomo folder
            folder = "/usr/src/app/logs/pyomo_" + str(self.id)
            shutil.rmtree(folder, ignore_errors=True)
            # If a stop signal arrives, try to disconnect all mqtt clients
            if self.output:
                for key, object in self.output.mqtt.items():
                    object.MQTTExit()
                    self.logger.debug("Client " + key + " is being disconnected")
            self.logger.info(return_msg)
            self.redisDB.set(self.finish_status_key, True)
            return return_msg

    @abstractmethod
    def optimize(self, count, solver_name, model_path):
        while not self.redisDB.get_bool(self.stop_signal_key):
            pass

    def get_finish_status(self):
        return self.redisDB.get_bool(self.finish_status_key)
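
# Illustrative sketch (not part of ControllerBase): replicates the string handling in
# run() above, which pulls the quoted solver name out of Pyomo's "SolverFactory was
# unable to create the solver" message to build a friendlier return message. The sample
# message and the helper name are hypothetical.
def _example_solver_error_message(error_text):
    solver_error = "The SolverFactory was unable to create the solver"
    if solver_error in error_text:
        i = error_text.index(solver_error)
        i_start = error_text.index("\"", i)
        i_end = error_text.index("\"", i_start + 1)
        solver = error_text[i_start + 1:i_end]
        return "Incorrect solver " + solver + " used"
    return error_text

# e.g. _example_solver_error_message(
#     'The SolverFactory was unable to create the solver "glpkk"')
# -> 'Incorrect solver glpkk used'
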
class ForecastPublisher(DataPublisher):
    def __init__(self, internal_topic_params, config, queue, publish_frequency, topic, id,
                 horizon_in_steps, dT_in_seconds):
        self.logger = MessageLogger.get_logger(__name__, id)
        self.load_data = {}
        self.flag = True
        self.file_path = os.path.join("/usr/src/app", "optimization", "loadData.dat")
        self.q = queue
        self.topic = topic
        self.horizon_in_steps = horizon_in_steps
        self.dT_in_seconds = dT_in_seconds
        self.id = id
        self.redisDB = RedisDB()
        try:
            super().__init__(True, internal_topic_params, config, publish_frequency, id)
        except Exception as e:
            self.redisDB.set("Error mqtt" + self.id, True)
            self.logger.error(e)

    def get_data(self):
        try:
            if not self.redisDB.get_bool(Constants.get_data_flow_key(self.id)):
                return None
            # check if new data is available
            if not self.q.empty():
                try:
                    new_data = self.q.get_nowait()
                    self.q.task_done()
                    self.load_data = new_data
                except Exception:
                    self.logger.debug("Queue empty")
            if not self.load_data:
                return None
            self.logger.debug("extract load data")
            data = self.extract_horizon_data()
            self.logger.debug(str(data))
            return data
        except Exception as e:
            self.logger.error(str(e))
            return None

    def extract_horizon_data(self):
        meas = []
        items = sorted(self.load_data.items())
        items = items[-self.horizon_in_steps:]
        for i in range(self.horizon_in_steps):
            value = items[i][1]
            if value < 0:
                value = 0
            meas.append(self.get_senml_meas(value, items[i][0]))
        doc = senml.SenMLDocument(meas)
        val = doc.to_json()
        return json.dumps(val)

    def get_senml_meas(self, value, time):
        if not isinstance(time, float):
            time = float(time.timestamp())
        meas = senml.SenMLMeasurement()
        meas.time = time
        meas.value = value
        meas.name = self.topic
        return meas
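
# Illustrative sketch (not part of ForecastPublisher): mirrors extract_horizon_data()
# above. The forecast dict maps timestamp -> load value; the publisher sorts it, keeps
# the newest horizon_in_steps entries and clamps negative loads to zero before the
# SenML conversion. Sample data and the helper name are hypothetical.
def _example_horizon_rows(load_data, horizon_in_steps):
    items = sorted(load_data.items())[-horizon_in_steps:]
    return [(ts, max(value, 0)) for ts, value in items]

# e.g. _example_horizon_rows({1546300800: 1.2, 1546304400: -0.3, 1546308000: 2.1}, 2)
# -> [(1546304400, 0), (1546308000, 2.1)]
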
class PVPrediction(threading.Thread):
    def __init__(self, config, output_config, input_config_parser, id, control_frequency,
                 horizon_in_steps, dT_in_seconds, generic_name):
        super().__init__()
        self.logger = MessageLogger.get_logger(__name__, id)
        self.logger.debug("PV prediction class")
        self.stopRequest = threading.Event()
        self.config = config
        self.q = Queue(maxsize=0)
        self.generic_name = generic_name
        self.control_frequency = control_frequency
        self.control_frequency = int(self.control_frequency / 2)
        self.control_frequency = 60
        self.id = id
        self.horizon_in_steps = horizon_in_steps
        self.dT_in_seconds = dT_in_seconds
        self.old_predictions = {}
        self.output_config = output_config
        self.influxDB = InfluxDBManager()
        self.raw_data_file_container = os.path.join("/usr/src/app", "prediction/resources", self.id,
                                                    "raw_data_" + str(generic_name) + ".csv")
        self.prediction_data_file_container = os.path.join("/usr/src/app", "prediction/resources", self.id,
                                                           "prediction_data_" + str(generic_name) + ".csv")
        self.error_result_file_path = os.path.join("/usr/src/app", "prediction/resources", self.id,
                                                   "error_data_" + str(generic_name) + ".csv")
        self.redisDB = RedisDB()
        raw_pv_data_topic = input_config_parser.get_params(generic_name)
        opt_values = input_config_parser.get_optimization_values()
        city = "Bonn"
        country = "Germany"
        try:
            city = opt_values["City"][None]
            country = opt_values["Country"][None]
        except Exception:
            self.logger.error("City or country not present in pv meta")
        location = {"city": city, "country": country}
        self.maxPV = float(opt_values["PV_Inv_Max_Power"][None])
        pv_forecast_topic = config.get("IO", "forecast.topic")
        pv_forecast_topic = json.loads(pv_forecast_topic)
        pv_forecast_topic["topic"] = pv_forecast_topic["topic"] + self.generic_name
        self.radiation = Radiation(config, self.maxPV, dT_in_seconds, location, horizon_in_steps)
        self.max_file_size_mins = config.getint("IO", "pv.raw.data.file.size", fallback=10800)
        self.copy_prediction_file_data_to_influx()
        from prediction.rawLoadDataReceiver import RawLoadDataReceiver
        self.raw_data = RawLoadDataReceiver(raw_pv_data_topic, config, 1, self.raw_data_file_container,
                                            generic_name, self.id, False, self.max_file_size_mins,
                                            self.influxDB)
        self.pv_forecast_pub = PVForecastPublisher(pv_forecast_topic, config, id, 60, horizon_in_steps,
                                                   dT_in_seconds, self.q)
        self.pv_forecast_pub.start()
        self.prediction_save_thread = threading.Thread(target=self.save_to_file_cron)
        self.prediction_save_thread.start()
        from prediction.errorReporting import ErrorReporting
        error_topic_params = config.get("IO", "error.topic")
        error_topic_params = json.loads(error_topic_params)
        error_topic_params["topic"] = error_topic_params["topic"] + generic_name
        self.error_reporting = ErrorReporting(config, id, generic_name, dT_in_seconds, control_frequency,
                                              horizon_in_steps, self.prediction_data_file_container,
                                              self.raw_data_file_container, error_topic_params,
                                              self.error_result_file_path, self.output_config,
                                              self.influxDB)
        self.error_reporting.start()

    def Stop(self):
        self.logger.debug("Stopping pv forecast thread")
        self.stopRequest.set()
        if self.pv_forecast_pub is not None:
            self.pv_forecast_pub.Stop()
        if self.raw_data is not None:
            self.raw_data.exit()
        if self.error_reporting:
            self.error_reporting.Stop()
        self.logger.debug("pv prediction thread exit")

    def run(self):
        self.logger.debug("Running pv prediction")
        while not self.stopRequest.is_set():
            if not self.redisDB.get_bool(Constants.get_data_flow_key(self.id)):
                time.sleep(30)
                continue
            self.logger.debug("pv prediction data flow true")
            try:
                start = time.time()
                data = self.raw_data.get_raw_data()
                self.logger.debug("pv data in run is " + str(data))
                if len(data) > 0:
                    value = data[0][1]
                    current_timestamp = data[0][0]
                    self.logger.debug("pv received timestamp " + str(current_timestamp) +
                                      " val " + str(value))
                    base_data = self.radiation.get_data(current_timestamp)
                    shifted_base_data = TimeSeries.shift_by_timestamp(base_data, current_timestamp,
                                                                      self.dT_in_seconds)
                    self.logger.debug("base_data = " + str(shifted_base_data))
                    adjusted_data = self.adjust_data(shifted_base_data, value, current_timestamp)
                    predicted_data = self.extract_horizon_data(adjusted_data)
                    if predicted_data is not None and len(predicted_data) > 0:
                        self.logger.debug("pv predicted timestamp " + str(predicted_data[0][0]))
                        self.q.put(predicted_data)
                        self.old_predictions[int(predicted_data[0][0])] = predicted_data
                start = self.control_frequency - (time.time() - start)
                if start > 0:
                    time.sleep(start)
            except Exception as e:
                self.logger.error(str(self.generic_name) + " prediction thread exception " + str(e))

    def adjust_data(self, shifted_base_data, value, current_timestamp):
        new_data = []
        if len(shifted_base_data) > 0:
            closest_index = self.find_closest_prev_timestamp(shifted_base_data, current_timestamp)
            self.logger.debug("closest index = " + str(closest_index))
            base_value = shifted_base_data[closest_index][1]
            # if value < 1:
            #     value = 1
            factor = value - base_value
            self.logger.debug("closest index value = " + str(base_value) + " mqtt value = " +
                              str(value) + " factor = " + str(factor))
            for row in shifted_base_data:
                new_value = row[1] + factor
                if new_value < 0:
                    new_value = 0
                if new_value > self.maxPV * 1000:
                    new_value = self.maxPV * 1000
                new_data.append([row[0], new_value])
            self.logger.debug("new_data = " + str(new_data))
        return new_data

    def find_closest_prev_timestamp(self, data, date):
        closest = 0
        for i, item in enumerate(data, 0):
            if item[0] <= date:
                closest = i
            else:
                break
        return closest

    def find_closest_next_timestamp(self, data, date):
        closest = len(data) - 1
        for i in reversed(range(len(data))):
            item = data[i]
            if item[0] > date:
                closest = i
            else:
                break
        return closest

    def extract_horizon_data(self, predicted_data):
        new_data = []
        if len(predicted_data) > 0:
            current_timestamp = datetime.datetime.now().timestamp()
            closest_index = self.find_closest_prev_timestamp(predicted_data, current_timestamp)
            for i in range(self.horizon_in_steps):
                row = predicted_data[closest_index]
                new_data.append([row[0], row[1]])
                closest_index += 1
                if closest_index >= len(predicted_data):
                    closest_index = 0
            return new_data
        else:
            return None

    def save_to_file_cron(self):
        self.logger.debug("Started save file cron")
        while not self.stopRequest.is_set():
            self.old_predictions = PredictionDataManager.save_predictions_dict_to_influx(
                self.influxDB, self.old_predictions, self.horizon_in_steps, self.generic_name, self.id)
            time.sleep(UtilFunctions.get_sleep_secs(1, 0, 0))
            # time.sleep(UtilFunctions.get_sleep_secs(0, 2, 0))

    def copy_prediction_file_data_to_influx(self):
        data_file = PredictionDataManager.get_prediction_data(self.prediction_data_file_container,
                                                              self.generic_name)
        if len(data_file) > 0:
            data = PredictionDataManager.save_predictions_dict_to_influx(
                self.influxDB, data_file, self.horizon_in_steps, self.generic_name, self.id)
            if len(data) == 0:
                PredictionDataManager.del_predictions_to_file(self.prediction_data_file_container,
                                                              self.generic_name)
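
# Illustrative sketch (not part of PVPrediction): condenses the adjust_data() step above.
# The base curve from Radiation is offset by the difference between the latest measured PV
# value and the base value at the closest previous timestamp, then clamped to
# [0, maxPV * 1000] (the code multiplies maxPV by 1000 before clamping, suggesting maxPV
# is in kW and the series in W). Sample data and the helper name are hypothetical.
def _example_adjust_base_curve(base_curve, measured_value, measured_ts, max_pv_kw):
    # closest previous timestamp, as in find_closest_prev_timestamp()
    closest = 0
    for i, (ts, _) in enumerate(base_curve):
        if ts <= measured_ts:
            closest = i
        else:
            break
    factor = measured_value - base_curve[closest][1]
    return [[ts, min(max(value + factor, 0), max_pv_kw * 1000)] for ts, value in base_curve]

# e.g. _example_adjust_base_curve([[0, 100.0], [900, 220.0]], 150.0, 0, 10)
# -> [[0, 150.0], [900, 270.0]]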