def __init__(self, id, path):
    self.logger = MessageLogger.get_logger(__name__, id)
    self.id = id
    self.evs = {}
    self.chargers = {}
    self.total_charging_stations_power = 0
    self.persist_real_data_file = path
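# Hedged usage sketch: one of the InputPreprocess-style snippets further below constructs
# this class as EVPark(id, persist_real_data_file), so EVPark is assumed to be the class
# wrapping this __init__; the id string and file path here are illustrative values only.
ev_park = EVPark("house-1", "/tmp/ev_info.txt")
assert ev_park.total_charging_stations_power == 0  # no chargers registered yet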
def __init__(self, internal, topic_params, config, emptyValue={}, id=None,
             section=None, prepare_topic_qos=True, sub_pub=False):
    super().__init__()
    self.logger = MessageLogger.get_logger(__name__, id)
    self.stop_request = False
    self.internal = internal
    self.topic_params = topic_params
    self.prepare_topic_qos = prepare_topic_qos
    self.emptyValue = emptyValue
    self.data = self.emptyValue.copy()
    self.data_update = False
    self.config = config
    self.channel = "MQTT"
    self.topics = None
    self.port = None
    self.host_params = {}
    self.first_time = 0
    self.last_time = 0
    self.id = id
    self.section = section
    self.sub_pub = sub_pub
    if self.section is None:
        self.section = "IO"
    self.setup()
    if self.channel == "MQTT":
        self.init_mqtt(self.topics)
    elif self.channel == "ZMQ":
        self.init_zmq(self.topics)
def __init__(self, input_config_file, input_config_mqtt, model_name, id, optimization_type):
    self.logger = MessageLogger.get_logger(__name__, id)
    self.model_name = model_name
    self.model_variables, self.param_key_list = ModelParamsInfo.get_model_param(self.model_name)
    self.input_config_file = input_config_file
    self.input_config_mqtt = input_config_mqtt
    self.base, self.derived = ModelDerivedParameters.get_derived_parameter_mapping(model_name, optimization_type)
    self.mqtt_params = {}
    self.generic_names = []
    self.generic_file_names = []
    # self.defined_prediction_names = ["P_Load", "P_Load_R", "P_Load_S", "P_Load_T",
    #                                  "Q_Load_R", "Q_Load_S", "Q_Load_T", "Q_Load"]
    self.defined_prediction_names = []
    self.defined_non_prediction_names = ["P_PV"]
    self.defined_external_names = ["SoC_Value"]
    self.defined_preprocess_names = []
    self.defined_event_names = []
    self.prediction_names = []
    self.non_prediction_names = []
    self.external_names = []
    self.preprocess_names = []
    self.event_names = []
    self.set_params = {}
    self.config_parser_utils = ConfigParserUtils()
    self.extract_mqtt_params()
    self.car_park = None
    self.simulator = None
    self.optimization_params = self.extract_optimization_values()
    self.logger.debug("optimization_params: " + str(self.optimization_params))
    self.logger.info("generic names = " + str(self.generic_names))
def __init__(self, config, id, topic_name, dT_in_seconds, control_frequency, horizon_in_steps,
             prediction_data_file_container, raw_data_file_container, topic_params,
             error_result_file_path, output_config, influxDB):
    self.logger = MessageLogger.get_logger(__name__, id)
    self.control_frequency = control_frequency
    self.horizon_in_steps = horizon_in_steps
    self.dT_in_seconds = dT_in_seconds
    self.raw_data_file_container = raw_data_file_container
    self.raw_data = RawDataReader()
    self.stopRequest = threading.Event()
    self.topic_name = topic_name
    self.id = id
    self.prediction_data_file_container = prediction_data_file_container
    self.error_result_file_path = error_result_file_path
    self.output_config = output_config
    self.topic_params = topic_params
    self.influxDB = influxDB
    redisDB = RedisDB()
    try:
        if self.update_topic_params():
            super().__init__(False, self.topic_params, config, control_frequency, id)
        else:
            super().__init__(True, self.topic_params, config, control_frequency, id)
    except Exception as e:
        redisDB.set("Error mqtt" + self.id, True)
        self.logger.error(e)
def __init__(self, internal, topic_params, config, generic_name, id, buffer, dT, base_value_flag):
    redisDB = RedisDB()
    self.logger = MessageLogger.get_logger(__name__, id)
    self.generic_name = generic_name
    self.buffer = buffer
    self.dT = dT
    self.base_value_flag = base_value_flag
    if "detachable" in topic_params.keys():
        self.detachable = topic_params["detachable"]
    else:
        self.detachable = False
    if "reuseable" in topic_params.keys():
        self.reuseable = topic_params["reuseable"]
    else:
        self.reuseable = False
    self.start_of_day = datetime.datetime.now().replace(hour=0, minute=0, second=0, microsecond=0).timestamp()
    self.total_steps_in_day = floor(24 * 60 * 60 / self.dT)
    self.current_day_index = 0
    self.number_of_bucket_days = int(buffer / self.total_steps_in_day)
    self.bucket_index = False
    self.length = 1
    try:
        super().__init__(internal, topic_params, config, id=id)
    except Exception as e:
        redisDB.set("Error mqtt" + self.id, True)
        self.logger.error(e)
def __init__(self, id, solver_name, model_path, control_frequency, repetition, output_config,
             input_config_parser, config, horizon_in_steps, dT_in_seconds, optimization_type):
    super(ControllerBase, self).__init__()
    self.logger = MessageLogger.get_logger(__name__, id)
    self.logger.info("Initializing optimization controller " + id)
    # pyomo_path = "/usr/src/app/logs/pyomo/" + str(id)
    self.pyomo_path = "/usr/src/app/logs/pyomo/"
    self.pyomo_path = os.path.abspath(self.pyomo_path)
    self.logger.debug("pyomo_path " + str(self.pyomo_path))
    if not os.path.exists(self.pyomo_path):
        try:
            os.makedirs(self.pyomo_path, mode=0o777, exist_ok=False)
            os.chmod(self.pyomo_path, 0o777)
        except Exception as e:
            self.logger.error(e)
    TempfileManager.tempdir = self.pyomo_path
    self.id = id
    self.results = ""
    self.model_path = os.path.abspath(model_path)
    self.solver_name = solver_name
    self.control_frequency = control_frequency
    self.repetition = repetition
    self.horizon_in_steps = horizon_in_steps
    self.dT_in_seconds = dT_in_seconds
    self.output_config = output_config
    self.input_config_parser = input_config_parser
    self.stopRequest = None  # threading.Event()
    self.redisDB = RedisDB()
    self.lock_key = "id_lock"
    self.optimization_type = optimization_type
    self.stop_signal_key = "opt_stop_" + self.id
    self.finish_status_key = "finish_status_" + self.id
    self.redisDB.set(self.stop_signal_key, False)
    self.redisDB.set(self.finish_status_key, False)
    self.repetition_completed = False
    self.preprocess = False
    self.input = None
    self.output = None
    self.solver_ipopt_max_iteration = config.getint("SolverSection", "solver.ipopt.max.iteration", fallback=1000)
    self.solver_ipopt_timeout = config.getint("SolverSection", "solver.ipopt.timeout", fallback=120)
    self.solver_gurobi_max_iteration = config.getint("SolverSection", "solver.gurobi.max.iteration", fallback=1000)
    self.solver_gurobi_timeout = config.getint("SolverSection", "solver.gurobi.timeout", fallback=3)
    if "False" in self.redisDB.get("Error mqtt" + self.id):
        self.output = OutputController(self.id, self.output_config)
    if "False" in self.redisDB.get("Error mqtt" + self.id):
        self.input = InputController(self.id, self.input_config_parser, config, self.control_frequency,
                                     self.horizon_in_steps, self.dT_in_seconds)
    self.monitor = MonitorPub(config, id)
def __init__(self, internal, topic_params, config, generic_name, id, event_callback):
    redisDB = RedisDB()
    self.logger = MessageLogger.get_logger(__name__, id)
    self.generic_name = generic_name
    self.event_callback = event_callback
    try:
        super().__init__(internal, topic_params, config, id=id)
    except Exception as e:
        redisDB.set("Error mqtt" + self.id, True)
        self.logger.error(e)
def __init__(self, config, id):
    self.logger = MessageLogger.get_logger(__name__, id)
    self.id = id
    self.config = config
    self.host = config.get("IO", "mqtt.host")
    self.port = config.getint("IO", "mqtt.port", fallback=1883)
    self.topic_params = json.loads(config.get("IO", "monitor.mqtt.topic"))
    self.host, host_params, self.qos, self.topic, self.port = ConfigParserUtils.extract_host_params(
        self.host, self.port, self.topic_params, self.config, None)
    self.mqtt = None
    self.init_mqtt()
def __init__(self, id=None, output_config=None):
    self.logger = MessageLogger.get_logger(__name__, id)
    self.logger.info("Output Class started")
    self.output_config = output_config
    self.mqtt = {}
    self.redisDB = RedisDB()
    self.mqtt_params = {}
    self.output_mqtt = {}
    self.id = id
    self.config_parser_utils = ConfigParserUtils()
    self.logger.debug("output_config: " + str(self.output_config) + " " + str(type(self.output_config)))
    if self.output_config is not None:
        self.extract_mqtt_params()
        self.init_mqtt()
def __init__(self, host, mqttPort, client_id, keepalive=60, username=None, password=None,
             ca_cert_path=None, set_insecure=False, id=None):
    self.logger = MessageLogger.get_logger(__name__, id)
    self.host = host
    self.port = int(mqttPort)
    self.keepalive = keepalive
    self.receivedMessages = []
    self.topic_sub_ack = []
    self.callback_function = None
    self.client_id = client_id
    self.id = id
    self.connected = False
    self.client = mqtt.Client(client_id, clean_session=False)
    if username is not None and password is not None:
        self.logger.debug("u " + username + " p " + password)
        self.client.username_pw_set(username, password)
    if ca_cert_path is not None and len(ca_cert_path) > 0:
        self.logger.debug("ca " + ca_cert_path)
        self.client.tls_set(ca_certs=ca_cert_path)
        self.logger.debug("insec " + str(set_insecure))
        if not isinstance(set_insecure, bool):
            set_insecure = bool(set_insecure)
        self.client.tls_insecure_set(set_insecure)
    self.client.on_message = self.on_message
    self.client.on_publish = self.on_publish
    self.client.on_connect = self.on_connect
    self.client.on_subscribe = self.on_subscribe
    self.client.on_disconnect = self.on_disconnect
    self.logger.info("Trying to connect to the MQTT broker " + str(self.host) + " " + str(self.port))
    try:
        self.client.connect(self.host, self.port, self.keepalive)
    except Exception as e:
        self.connected = False
        msg = "Invalid MQTT host " + str(self.host) + " " + str(self.port)
        self.logger.error("Error connecting client " + str(self.host) + " " + str(self.port) + " " + str(e))
        raise InvalidMQTTHostException(msg)
    # self.client.loop_forever()
    self.client.loop_start()
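# Hedged usage sketch: the wrapper class name MQTTClient and the broker address below are
# assumptions (the class name is not visible in this excerpt). The constructor above raises
# InvalidMQTTHostException when the broker cannot be reached, so callers guard for it.
try:
    client = MQTTClient("localhost", 1883, "client-house-1", keepalive=60, id="house-1")
except InvalidMQTTHostException as e:
    print("MQTT broker not reachable: " + str(e))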
def __init__(self, model_name, control_frequency, horizon_in_steps, dT_in_seconds, repetition,
             solver, id, optimization_type, single_ev):
    self.logger = MessageLogger.get_logger(__name__, id)
    self.model_name = model_name
    self.control_frequency = control_frequency
    self.horizon_in_steps = horizon_in_steps
    self.dT_in_seconds = dT_in_seconds
    self.repetition = repetition
    self.solver = solver
    self.id = id
    self.optimization_type = optimization_type
    self.single_ev = single_ev
    self.redisDB = RedisDB()
    self.pyro_mip_server = None
def __init__(self, internal_topic_params, config, id, control_frequency, horizon_in_steps, dT_in_seconds, q):
    self.logger = MessageLogger.get_logger(__name__, id)
    self.pv_data = {}
    self.q = q
    self.control_frequency = control_frequency
    self.horizon_in_steps = horizon_in_steps
    self.dT_in_seconds = dT_in_seconds
    self.topic = "P_PV"
    self.redisDB = RedisDB()
    self.id = id
    try:
        super().__init__(True, internal_topic_params, config, control_frequency, id)
    except Exception as e:
        self.redisDB.set("Error mqtt" + self.id, True)
        self.logger.error(e)
def __init__(self, internal, topic_params, config, generic_name, id, buffer, dT, base_value_flag):
    self.id = id
    self.redisDB = RedisDB()
    self.logger = MessageLogger.get_logger(__name__, id)
    self.generic_name = generic_name
    self.buffer = buffer
    self.dT = dT
    self.base_value_flag = base_value_flag
    self.set_data_update(False)
    persist_real_data_path = config.get("IO", "persist.real.data.path", fallback="optimization/resources")
    persist_real_data_path = os.path.join("/usr/src/app", persist_real_data_path, id, "real")
    self.persist_real_data_file = os.path.join(persist_real_data_path, generic_name + ".txt")
    if "detachable" in topic_params.keys():
        self.detachable = topic_params["detachable"]
    else:
        self.detachable = False
    if self.detachable:
        self.value_used_once = False
    if "reuseable" in topic_params.keys():
        self.reuseable = topic_params["reuseable"]
    else:
        self.reuseable = False
    if self.reuseable and not os.path.exists(persist_real_data_path):
        os.makedirs(persist_real_data_path)
    self.start_of_day = datetime.datetime.now().replace(hour=0, minute=0, second=0, microsecond=0).timestamp()
    self.total_steps_in_day = floor(24 * 60 * 60 / self.dT)
    self.current_day_index = 0
    self.number_of_bucket_days = int(buffer / self.total_steps_in_day)
    self.bucket_index = False
    self.length = 1
    try:
        super(BaseDataReceiver, self).__init__(internal, topic_params, config, id=id)
    except Exception as e:
        self.redisDB.set("Error mqtt" + id, True)
        self.logger.error(e)
    if self.reuseable:
        formated_data = self.read_data()
        if formated_data is not None and len(formated_data) > 0:
            self.length = len(formated_data)
            self.data.update(formated_data)
            self.set_data_update(True)
            self.last_time = time.time()
def __init__(self, host, mqttPort, client_id, keepalive=60, username=None, password=None,
             ca_cert_path=None, set_insecure=False, id=None, connect_check_flag=False):
    self.logger = MessageLogger.get_logger(__name__, id)
    self.exit = False
    self.host = host
    self.port = int(mqttPort)
    self.keepalive = keepalive
    self.receivedMessages = []
    self.topic_sub_ack = []
    self.callback_function = None
    self.client_id = client_id
    self.id = id
    self.connected = False
    self.client = mqtt.Client(client_id, clean_session=False)
    if username is not None and password is not None:
        self.logger.debug("u " + username + " p " + password)
        self.client.username_pw_set(username, password)
    if ca_cert_path is not None and len(ca_cert_path) > 0:
        self.logger.debug("ca " + ca_cert_path)
        if os.path.exists(ca_cert_path):
            self.client.tls_set(ca_certs=ca_cert_path)
        else:
            self.logger.warning("ca cert path does not exist " + str(ca_cert_path))
        self.logger.debug("insec " + str(set_insecure))
        if not isinstance(set_insecure, bool):
            set_insecure = bool(set_insecure)
        self.client.tls_insecure_set(set_insecure)
    self.client.on_message = self.on_message
    self.client.on_publish = self.on_publish
    self.client.on_connect = self.on_connect
    self.client.on_subscribe = self.on_subscribe
    self.client.on_disconnect = self.on_disconnect
    self.connect_to_mqtt()
    if connect_check_flag:
        # keep the Thread object (Thread.start() returns None)
        self.check_connect_thread = threading.Thread(target=self.check_conection)
        self.check_connect_thread.start()
def __init__(self, id=None, output_config=None):
    self.logger = MessageLogger.get_logger(__name__, id)
    self.logger.info("Output Class started")
    self.output_config = output_config
    self.mqtt = {}
    self.redisDB = RedisDB()
    self.mqtt_params = {}
    self.output_mqtt = {}
    self.id = id
    self.logger.debug("output_config: " + str(self.output_config) + " " + str(type(self.output_config)))
    if self.output_config is not None:
        self.mqtt_params = ConfigParserUtils.extract_mqtt_params_output(
            self.output_config, "error_calculation", False)
        self.logger.debug("params = " + str(self.mqtt_params))
        self.init_mqtt()
def __init__(self, internal_topic_params, config, queue, publish_frequency, topic, id,
             horizon_in_steps, dT_in_seconds):
    self.logger = MessageLogger.get_logger(__name__, id)
    self.load_data = {}
    self.flag = True
    self.file_path = os.path.join("/usr/src/app", "optimization", "loadData.dat")
    self.q = queue
    self.topic = topic
    self.horizon_in_steps = horizon_in_steps
    self.dT_in_seconds = dT_in_seconds
    self.id = id
    self.redisDB = RedisDB()
    try:
        super().__init__(True, internal_topic_params, config, publish_frequency, id)
    except Exception as e:
        self.redisDB.set("Error mqtt" + self.id, True)
        self.logger.error(e)
def __init__(self, config, input_config_parser, id, control_frequency, horizon_in_steps,
             dT_in_seconds, generic_name):
    super().__init__()
    self.logger = MessageLogger.get_logger(__name__, id)
    self.logger.debug("PV prediction class")
    self.stopRequest = threading.Event()
    self.config = config
    self.q = Queue(maxsize=0)
    self.generic_name = generic_name
    self.control_frequency = control_frequency
    raw_pv_data_topic = input_config_parser.get_params(generic_name)
    opt_values = input_config_parser.get_optimization_values()
    city = "Bonn"
    country = "Germany"
    try:
        city = opt_values["City"][None]
        country = opt_values["Country"][None]
    except Exception:
        self.logger.error("City or country not present in pv meta")
    location = {"city": city, "country": country}
    maxPV = float(opt_values["PV_Inv_Max_Power"][None])
    pv_forecast_topic = config.get("IO", "forecast.topic")
    pv_forecast_topic = json.loads(pv_forecast_topic)
    pv_forecast_topic["topic"] = pv_forecast_topic["topic"] + self.generic_name
    self.base_data = {}
    radiation = Radiation(config, maxPV, dT_in_seconds, location)
    self.pv_thread = threading.Thread(target=self.get_pv_data_from_source, args=(radiation,))
    self.pv_thread.start()
    self.raw_data = GenericDataReceiver(False, raw_pv_data_topic, config, self.generic_name, id, 1, dT_in_seconds)
    self.pv_forecast_pub = PVForecastPublisher(pv_forecast_topic, config, id, control_frequency,
                                               horizon_in_steps, dT_in_seconds, self.q)
    self.pv_forecast_pub.start()
def __init__(self, id, solver_name, model_path, control_frequency, repetition, output_config,
             input_config_parser, config, horizon_in_steps, dT_in_seconds, optimization_type):
    super().__init__()
    pyomo_path = "/usr/src/app/logs/pyomo_" + str(id)
    if not os.path.exists(pyomo_path):
        os.makedirs(pyomo_path, mode=0o777, exist_ok=False)
        os.chmod(pyomo_path, 0o777)
    TempfileManager.tempdir = pyomo_path
    self.logger = MessageLogger.get_logger(__name__, id)
    self.logger.info("Initializing optimization controller " + id)
    self.id = id
    self.results = ""
    self.model_path = model_path
    self.solver_name = solver_name
    self.control_frequency = control_frequency
    self.repetition = repetition
    self.horizon_in_steps = horizon_in_steps
    self.dT_in_seconds = dT_in_seconds
    self.output_config = output_config
    self.input_config_parser = input_config_parser
    self.stopRequest = None  # threading.Event()
    self.redisDB = RedisDB()
    self.lock_key = "id_lock"
    self.optimization_type = optimization_type
    self.stop_signal_key = "opt_stop_" + self.id
    self.finish_status_key = "finish_status_" + self.id
    self.redisDB.set(self.stop_signal_key, False)
    self.redisDB.set(self.finish_status_key, False)
    self.repetition_completed = False
    self.preprocess = False
    self.input = None
    self.output = None
    if "False" in self.redisDB.get("Error mqtt" + self.id):
        self.output = OutputController(self.id, self.output_config)
    if "False" in self.redisDB.get("Error mqtt" + self.id):
        self.input = InputController(self.id, self.input_config_parser, config, self.control_frequency,
                                     self.horizon_in_steps, self.dT_in_seconds)
    """try:
def get_config_and_logger(parent, source_config_path, destination_config_path):
    ConfigUpdater.copy_config(source_config_path, destination_config_path)
    # Creating an object of the configuration file (standard values)
    config = configparser.RawConfigParser()
    config.optionxform = str
    config.read(destination_config_path)
    ConfigUpdater.set_constant_values(config)
    log_level = config.get("IO", "log.level", fallback="DEBUG")
    logger = MessageLogger.set_and_get_logger_parent(id="", level=log_level, parent=parent)
    for section in config.sections():
        logger.info("[" + section + "]")
        for key, value in config.items(section):
            logger.info(key + " = " + value)
    return config, logger
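# Hedged illustration: get_config_and_logger() above expects an INI-style properties file
# readable by configparser. The section and key names below are taken from config.get(...)
# calls in this document (e.g. "IO", "log.level", "mqtt.host", "mqtt.port"); the concrete
# values are illustrative assumptions only.
import configparser

_example_properties = """
[IO]
log.level = DEBUG
mqtt.host = localhost
mqtt.port = 1883
"""

_cfg = configparser.RawConfigParser()
_cfg.optionxform = str  # preserve key case, as done in the function above
_cfg.read_string(_example_properties)
print(_cfg.get("IO", "log.level", fallback="DEBUG"))  # -> DEBUG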
def __init__(self, id, mqtt_time_threshold, config):
    self.data_dict = {}
    self.initial_pass = False
    self.logger = MessageLogger.get_logger(__name__, id)
    persist_real_data_path = "optimization/resources"
    persist_real_data_path = os.path.join("/usr/src/app", persist_real_data_path, id, "real")
    # persist_real_data_path = os.path.join(os.getcwd())
    self.persist_real_data_file = os.path.join(persist_real_data_path, "ev_info" + ".txt")
    self.ev_park = EVPark(id, self.persist_real_data_file)
    self.id = id
    self.mqtt_time_threshold = mqtt_time_threshold
    self.event_data = []
    self.charger_unplug_event = []
    persist_base_file_path = config.get("IO", "persist.base.file.path")
    self.charger_base_path = os.path.join("/usr/src/app", persist_base_file_path, str(id),
                                          Constants.persisted_folder_name)
    self.charger_file_name = "chargers.json"
def __init__(self, config):
    self.logger = MessageLogger.get_logger(__name__, None)
    self.config = config
    self.docker_file_names = self.get_docker_file_names()
    if len(self.docker_file_names) > 0:
        self.docker_components = self.get_docker_file_components()
    self.topic_params = json.loads(config.get("IO", "monitor.mqtt.topic"))
    self.check_frequency = config.getint("IO", "monitor.frequency.sec", fallback=60)
    self.allowed_delay_count = config.getfloat("IO", "allowed.delay.count", fallback=2)
    self.timeout = config.getint("IO", "timeout", fallback=60)
    self.status = Status(False, self.topic_params, config)
    self.service_status = {}
    self.log_persisted = {}
    self.start_services()
    self.check_status_thread = threading.Thread(target=self.check_status)
    self.check_status_thread.start()
def __init__(self, input_config_file, input_config_mqtt, model_name, id, optimization_type,
             persist_path, restart):
    self.logger = MessageLogger.get_logger(__name__, id)
    self.model_name = model_name
    self.model_variables, self.param_key_list = ModelParamsInfo.get_model_param(self.model_name)
    self.input_config_file = input_config_file
    self.input_config_mqtt = input_config_mqtt
    self.base, self.derived = ModelDerivedParameters.get_derived_parameter_mapping(model_name, optimization_type)
    self.mqtt_params = {}
    self.generic_names = []
    self.generic_file_names = []
    self.defined_prediction_names = []
    self.defined_pv_prediction_names = []
    self.defined_pv_lstm_names = []
    self.defined_external_names = ["SoC_Value"]
    self.defined_preprocess_names = []
    self.defined_event_names = []
    self.defined_sampling_names = []
    self.prediction_names = []
    self.pv_prediction_names = []
    self.pv_lstm_names = []
    self.external_names = []
    self.preprocess_names = []
    self.event_names = []
    self.sampling_names = []
    self.set_params = {}
    self.extract_mqtt_params()
    self.car_park = None
    self.simulator = None
    self.optimization_params = self.extract_optimization_values()
    self.restart = restart
    if restart:
        self.read_persisted_data(persist_path)
    self.logger.debug("optimization_params: " + str(self.optimization_params))
    self.logger.info("generic names = " + str(self.generic_names))
def setup():
    signal.signal(signal.SIGINT, signal_handler)
    config_path = "/usr/src/app/optimization/resources/ConfigFile.properties"
    config_path_default = "/usr/src/app/config/ConfigFile.properties"
    ConfigUpdater.copy_config(config_path_default, config_path)
    # Creating an object of the configuration file (standard values)
    config = configparser.RawConfigParser()
    config.read(config_path)
    log_level = config.get("IO", "log.level", fallback="DEBUG")
    logger = MessageLogger.set_and_get_logger_parent(id="", level=log_level)
    redisDB = clear_redis(logger)
    copy_models()
    copy_env_varibles()
    # logger.debug("env = " + str(os.environ))
    zmqHost = config.get("IO", "zmq.host")
    pubPort = config.get("IO", "zmq.pub.port")
    subPort = config.get("IO", "zmq.sub.port")
    zmqForwarder = ForwarderDevice(zmqHost, pubPort, subPort)
    zmqForwarder.start()
    return logger, redisDB
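# Hedged usage sketch: setup() above returns the parent logger and the Redis handle, so a
# typical entry point would look roughly like this; the log message is illustrative only.
if __name__ == '__main__':
    logger, redisDB = setup()
    logger.info("framework services started")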
def __init__(self, internal, topic_params, config, publish_frequency, id=None):
    super().__init__()
    self.logger = MessageLogger.get_logger(__name__, id)
    self.internal = internal
    self.config = config
    self.channel = "MQTT"
    self.id = id
    self.logger.debug("id = " + str(self.id))
    if internal:
        self.channel = config.get("IO", "channel")
    if topic_params is None:
        self.topic_params = {}
    else:
        self.topic_params = topic_params
    self.publish_frequency = publish_frequency
    self.stopRequest = threading.Event()
    if self.channel == "MQTT":
        self.init_mqtt()
    elif self.channel == "ZMQ":
        self.init_zmq()
    self.logger.info("Initializing data publisher thread for topic " + str(self.topic_params))
import configparser
import datetime
import json
from math import floor, ceil
import os

import requests

from IO.locationData import LocationData
from utils_intern.messageLogger import MessageLogger
from utils_intern.timeSeries import TimeSeries

logger = MessageLogger.get_logger_parent()

# Date = Date & time (UTC)
# EPV  = PV power output if requested (W)
# Bi   = In-plane beam irradiance (W/m2)
# Di   = Diffuse in-plane irradiance (W/m2) (if radiation components are requested)
# Ri   = Reflected in-plane irradiance (W/m2) (if radiation components are requested)
# As   = Sun elevation (degrees above horizon)
# Tamb = Air temperature (°C)
# W10  = Wind speed at 10 m (m/s)


class RadiationData:

    def __init__(self, date, pv_output):
        self.date = datetime.datetime(date.year, date.month, date.day, date.hour, 0)
        self.pv_output = pv_output

    def default(self):
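# Hedged illustration of the RadiationData constructor above: the timestamp is truncated
# to the full hour (minutes and seconds are dropped). The datetime and the 1500 W value
# are illustrative assumptions.
#
#   sample = RadiationData(datetime.datetime(2018, 10, 19, 11, 53), 1500)
#   sample.date       -> 2018-10-19 11:00:00
#   sample.pv_output  -> 1500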
""" Created on Okt 19 11:53 2018 @author: nishit """ import configparser from config.configUpdater import ConfigUpdater from connector.Connector import Connector from utils_intern.messageLogger import MessageLogger connector_status = {} if __name__ == '__main__': config = None config_path = "/usr/src/app/connector/resources/connectorConfig.properties" config_path_default = "/usr/src/app/config/connectorConfig.properties" ConfigUpdater.copy_config(config_path_default, config_path, True) config = configparser.ConfigParser() config.optionxform = str config.read(config_path) log_level = config.get("IO", "log.level", fallback="DEBUG") logger = MessageLogger.set_and_get_logger_parent(id="", level=log_level) connector = Connector(config)
def __init__(self, id, input_config_parser, config, control_frequency, horizon_in_steps, dT_in_seconds):
    self.logger = MessageLogger.get_logger(__name__, id)
    self.stop_request = False
    self.optimization_data = {}
    self.input_config_parser = input_config_parser
    self.logger.debug("Config parser: " + str(self.input_config_parser))
    self.config = config
    self.control_frequency = control_frequency
    self.horizon_in_steps = horizon_in_steps
    self.dT_in_seconds = dT_in_seconds
    self.id = id
    self.prediction_mqtt_flags = {}
    self.non_prediction_mqtt_flags = {}
    self.external_mqtt_flags = {}
    self.preprocess_mqtt_flags = {}
    self.event_mqtt_flags = {}
    self.generic_data_mqtt_flags = {}
    self.generic_names = None
    self.mqtt_timer = {}
    mqtt_time_threshold = float(self.config.get("IO", "mqtt.detach.threshold", fallback=180))
    self.inputPreprocess = InputPreprocess(self.id, mqtt_time_threshold)
    self.event_data = {}
    self.parse_input_config()
    self.set_timestep_data()
    sec_in_day = 24 * 60 * 60
    self.steps_in_day = floor(sec_in_day / dT_in_seconds)
    self.required_buffer_data = 0
    horizon_sec = horizon_in_steps * dT_in_seconds
    while horizon_sec > 0:
        self.required_buffer_data += self.steps_in_day
        horizon_sec = horizon_sec - sec_in_day
    self.internal_receiver = {}
    for name, flag in self.prediction_mqtt_flags.items():
        if flag:
            # should be the prediction topic instead of load
            prediction_topic = config.get("IO", "forecast.topic")
            prediction_topic = json.loads(prediction_topic)
            prediction_topic["topic"] = prediction_topic["topic"] + name
            self.internal_receiver[name] = GenericDataReceiver(True, prediction_topic, config, name, self.id,
                                                               self.required_buffer_data, self.dT_in_seconds)
    for name, flag in self.non_prediction_mqtt_flags.items():
        if flag:
            non_prediction_topic = config.get("IO", "forecast.topic")
            non_prediction_topic = json.loads(non_prediction_topic)
            non_prediction_topic["topic"] = non_prediction_topic["topic"] + name
            self.internal_receiver[name] = GenericDataReceiver(True, non_prediction_topic, config, name, self.id,
                                                               self.required_buffer_data, self.dT_in_seconds)
    # ESS data
    self.external_data_receiver = {}
    for name, flag in self.external_mqtt_flags.items():
        if flag:
            if name == "SoC_Value":
                params = self.input_config_parser.get_params(name)
                self.logger.debug("params for MQTT SoC_Value: " + str(params))
                self.external_data_receiver[name] = SoCValueDataReceiver(False, params, config, name, self.id,
                                                                         self.required_buffer_data,
                                                                         self.dT_in_seconds)
    self.preprocess_data_receiver = {}
    for name, flag in self.preprocess_mqtt_flags.items():
        if flag:
            params = self.input_config_parser.get_params(name)
            self.logger.debug("params for MQTT " + name + " : " + str(params))
            self.external_data_receiver[name] = BaseValueDataReceiver(False, params, config, name, self.id,
                                                                      self.required_buffer_data, self.dT_in_seconds)
    self.event_data_receiver = {}
    for name, flag in self.event_mqtt_flags.items():
        if flag:
            params = self.input_config_parser.get_params(name)
            self.logger.debug("params for MQTT " + name + " : " + str(params))
            self.external_data_receiver[name] = GenericEventDataReceiver(False, params, config, name, self.id,
                                                                         self.inputPreprocess.event_received)
    self.generic_data_receiver = {}
    if len(self.generic_data_mqtt_flags) > 0:
        for generic_name, mqtt_flag in self.generic_data_mqtt_flags.items():
            if mqtt_flag:
                topic = self.input_config_parser.get_params(generic_name)
                self.generic_data_receiver[generic_name] = GenericDataReceiver(False, topic, config, generic_name,
                                                                               self.id, self.required_buffer_data,
                                                                               self.dT_in_seconds)
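# Hedged illustration of how the forecast topics above are composed: the "forecast.topic"
# config value is a JSON object whose "topic" field gets the generic name (for example
# "P_Load") appended. The JSON shown here is an illustrative assumption, not the project's
# actual configuration.
import json

_forecast_topic = json.loads('{"topic": "house-1/forecast/", "qos": 1}')
_forecast_topic["topic"] = _forecast_topic["topic"] + "P_Load"
print(_forecast_topic)  # {'topic': 'house-1/forecast/P_Load', 'qos': 1}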
""" Created on Okt 19 11:54 2018 @author: nishit """ import json import time from senml import senml from IO.RecPub import RecPub from utils_intern.messageLogger import MessageLogger logger = MessageLogger.get_logger_parent(parent="connector") class ParserConnector(RecPub): def __init__(self, receiver_params, publisher_workers, config, house, monitor_connector): self.pub_prefix = config.get("IO", "pub.topic.prefix") + str(house) + "/" self.key_level = int(config.get(house, "key.level")) self.key_separator = config.get(house, "key.separator", fallback="/") self.data_type = config.get(house, "data.type", fallback="json") self.key_map = dict(config.items("KEYS")) self.house = house self.base = senml.SenMLMeasurement() self.base.name = house + "/" super().__init__(receiver_params, publisher_workers, config, house, monitor_connector)
def __init__(self, id, mqtt_time_threshold):
    self.data_dict = {}
    self.logger = MessageLogger.get_logger(__name__, id)
    self.ev_park = EVPark(id)
    self.mqtt_time_threshold = mqtt_time_threshold
    self.event_data = {}
def __init__(self, config, horizon_in_steps, topic_name, dT_in_seconds, id, type, opt_values):
    super(MachineLearning, self).__init__()
    self.logger = MessageLogger.get_logger(__name__, id)
    self.horizon_in_steps = horizon_in_steps
    self.topic_name = topic_name
    self.dT_in_seconds = dT_in_seconds
    self.id = id
    self.type = type
    self.redisDB = RedisDB()
    self.influxDB = InfluxDBManager()
    if self.type == "load":
        self.model_data_dT = 60
        self.input_size = 1440
        self.hidden_size = 100
        self.batch_size = 1
        self.num_epochs = 10
        self.output_size = 1440
        self.processingData = ProcessingData(type)
        self.model_file_container_base = os.path.join("/usr/src/app/prediction/model", "model_base.h5")
    elif self.type == "pv":
        self.model_data_dT = 60
        self.input_size = 1
        self.input_size_hist = 24
        self.hidden_size = 100
        self.batch_size = 1
        self.num_epochs = 10
        self.output_size = 1440
        city = "Bonn"
        country = "Germany"
        self.logger.info("opt va " + str(opt_values))
        try:
            if "City" in opt_values.keys() and "Country" in opt_values.keys():
                for k, v in opt_values["City"].items():
                    city = v
                    break
                for k, v in opt_values["Country"].items():
                    country = v
                    break
            else:
                self.logger.error("City or country not present in pv meta")
        except Exception:
            self.logger.error("City or country not present in pv meta")
        location = {"city": city, "country": country}
        radiation = Radiation(config, 1, dT_in_seconds, location, horizon_in_steps)
        hist_data = radiation.get_complete_data()
        self.processingData = ProcessingData(type, hist_data)
        self.model_file_container_base = os.path.join("/usr/src/app/prediction/model", "model_base_pv.h5")
    base_path = "/usr/src/app/prediction/resources"
    dir_data = os.path.join(base_path, self.id)
    if not os.path.exists(dir_data):
        os.makedirs(dir_data)
    self.raw_data_file_container = os.path.join(base_path, self.id, "raw_data_" + str(topic_name) + ".csv")
    self.model_file_container = os.path.join(base_path, self.id, "model_" + str(topic_name) + ".h5")
    self.model_file_container_temp = os.path.join(base_path, self.id, "model_temp_" + str(topic_name) + ".h5")
    self.model_file_container_train = os.path.join(base_path, self.id, "model_train_" + str(topic_name) + ".h5")
    self.forecast_pub = None
    self.prediction_thread = None
    self.training_thread = None
    self.raw_data = None
    self.models = Models(self.model_file_container, self.model_file_container_temp, self.model_file_container_base)