Esempio n. 1
0
 def __init__(self, internal, topic_params, config, generic_name, id,
              buffer, dT, base_value_flag):
     """Set up a buffered data receiver for the topic ``generic_name``.

     Stores the buffering parameters, reads the optional ``detachable``
     and ``reuseable`` flags from ``topic_params`` and delegates the
     channel setup to the base class.  A failure in the base-class init
     is flagged in redis under ``"Error mqtt" + id``.
     """
     redisDB = RedisDB()
     # Assign the id up-front: the except-branch previously read
     # ``self.id``, which is only set by the base class and therefore
     # raised AttributeError when super().__init__ failed early.
     self.id = id
     self.logger = MessageLogger.get_logger(__name__, id)
     self.generic_name = generic_name
     self.buffer = buffer
     self.dT = dT
     self.base_value_flag = base_value_flag
     # dict.get replaces the manual "in keys()" / else dance
     self.detachable = topic_params.get("detachable", False)
     self.reuseable = topic_params.get("reuseable", False)
     self.start_of_day = datetime.datetime.now().replace(
         hour=0, minute=0, second=0, microsecond=0).timestamp()
     self.total_steps_in_day = floor(24 * 60 * 60 / self.dT)
     self.current_day_index = 0
     self.number_of_bucket_days = int(buffer / self.total_steps_in_day)
     self.bucket_index = False
     self.length = 1
     try:
         super().__init__(internal, topic_params, config, id=id)
     except Exception as e:
         # use the local ``id``: self.id may be relied on by the base
         # class, but ``id`` is always available here
         redisDB.set("Error mqtt" + id, True)
         self.logger.error(e)
Esempio n. 2
0
def clear_redis(logger):
    """Wipe the redis store and record when the reset happened.

    Returns the RedisDB handle so the caller can keep using it.
    """
    logger.info("reset redis")
    from IO.redisDB import RedisDB
    db = RedisDB()
    db.reset()
    # stamp the store with the moment it was cleared
    db.set("time", time.time())
    return db
 def __init__(self, topic_params, config, buffer, save_path, topic_name, id,
              load_file_data, max_file_size_mins, influxDB):
     """Receive raw data, persist it to ``save_path`` and mirror it to influx.

     A background thread periodically saves the buffered data to file;
     the save frequency is read from the ``IO`` config section.
     """
     self.file_path = save_path
     # Assign the id before super() runs: the except-branch previously
     # read ``self.id``, which was only set further below and raised
     # AttributeError when the base init failed.
     self.id = id
     redisDB = RedisDB()
     try:
         super().__init__(False, topic_params, config, [], id)
     except Exception as e:
         redisDB.set("Error mqtt" + id, True)
         logger.error(e)
     self.influxDB = influxDB
     self.buffer_data = []
     self.buffer = buffer
     self.current_minute = None
     self.sum = 0
     self.count = 0
     self.minute_data = []
     self.topic_name = topic_name
     self.max_file_size_mins = max_file_size_mins
     self.save_cron_freq = config.getint("IO",
                                         "raw.data.file.save.frequency.sec",
                                         fallback=3600)
     self.copy_file_data_to_influx()
     if load_file_data:
         self.load_data()
     # flush buffered data to file periodically in the background
     self.file_save_thread = threading.Thread(target=self.save_to_file_cron,
                                              args=(self.save_cron_freq, ))
     self.file_save_thread.start()
 def __init__(self, internal, topic_params, config, emptyValue=None, id=None, section=None, prepare_topic_qos=True,
              sub_pub=False, connect_check_flag=False):
     """Base receiver that connects to MQTT (default) or ZMQ.

     :param internal: whether the topic is internal to the framework.
     :param topic_params: topic configuration used by ``setup``.
     :param config: config parser read via ``self.section`` (default "IO").
     :param emptyValue: template for ``self.data``; defaults to ``{}``.
         The signature uses ``None`` as the default to avoid the shared
         mutable-default-argument pitfall — behavior for callers is
         unchanged.
     """
     super(DataReceiver, self).__init__()
     self.logger = MessageLogger.get_logger(__name__, id)
     self.stop_request = False
     self.internal = internal
     self.topic_params = topic_params
     self.prepare_topic_qos = prepare_topic_qos
     # fresh dict per instance instead of one dict shared by every call
     self.emptyValue = {} if emptyValue is None else emptyValue
     self.connect_check_flag = connect_check_flag
     self.data = self.emptyValue.copy()
     self.data_update = False
     self.config = config
     self.channel = "MQTT"
     self.topics = None
     self.port = None
     self.host_params = {}
     self.first_time = 0
     self.last_time = 0
     self.id = id
     self.section = section
     self.redisDB = RedisDB()
     self.sub_pub = sub_pub
     if self.section is None:
         self.section = "IO"
     self.setup()
     # setup() decides the channel; dispatch to the matching initializer
     if self.channel == "MQTT":
         self.init_mqtt(self.topics)
     elif self.channel == "ZMQ":
         self.init_zmq(self.topics)
Esempio n. 5
0
 def __init__(self, config, id, topic_name, dT_in_seconds,
              control_frequency, horizon_in_steps,
              prediction_data_file_container, raw_data_file_container,
              topic_params, error_result_file_path, output_config,
              influxDB):
     """Initialize the error-reporting component and its topic connection."""
     self.logger = MessageLogger.get_logger(__name__, id)
     self.id = id
     self.topic_name = topic_name
     self.topic_params = topic_params
     self.control_frequency = control_frequency
     self.horizon_in_steps = horizon_in_steps
     self.dT_in_seconds = dT_in_seconds
     self.raw_data_file_container = raw_data_file_container
     self.prediction_data_file_container = prediction_data_file_container
     self.error_result_file_path = error_result_file_path
     self.output_config = output_config
     self.influxDB = influxDB
     self.raw_data = RawDataReader()
     self.stopRequest = threading.Event()
     redis_client = RedisDB()
     try:
         # updated topic params mean an external channel (internal=False)
         internal = not self.update_topic_params()
         super().__init__(internal, self.topic_params, config,
                          control_frequency, id)
     except Exception as e:
         redis_client.set("Error mqtt" + self.id, True)
         self.logger.error(e)
Esempio n. 6
0
 def exit_gracefully(self, signum, frame):
     """Signal handler: announce the shutdown, flag it in redis and exit."""
     sig_name = self.signals[signum]
     print("\nReceived {} signal".format(sig_name))
     print("Cleaning up resources. End of the program")
     from IO.redisDB import RedisDB
     db = RedisDB()
     db.set("End ofw", "True")
     # give other components a moment to observe the flag
     time.sleep(6)
     self.kill_now = True
Esempio n. 7
0
def clear_redis(logger):
    """Drop the redis training-lock key; a missing key is not an error."""
    logger.info("reset redis training key locks")
    from IO.redisDB import RedisDB
    db = RedisDB()
    try:
        db.remove("training_lock")
    except Exception:
        # key was already absent
        logger.debug("training_lock key does not exist")
Esempio n. 8
0
    def __init__(self, id, solver_name, model_path, control_frequency, repetition, output_config, input_config_parser,
                 config, horizon_in_steps, dT_in_seconds, optimization_type):
        """Initialize the optimization controller.

        Prepares the pyomo temp directory, resets the per-id stop/finish
        flags in redis and, when no MQTT error is flagged, builds the
        output and input controllers.
        """
        super(ControllerBase, self).__init__()

        self.logger = MessageLogger.get_logger(__name__, id)
        self.logger.info("Initializing optimization controller " + id)

        self.pyomo_path = os.path.abspath("/usr/src/app/logs/pyomo/")
        self.logger.debug("pyomo_path " + str(self.pyomo_path))

        if not os.path.exists(self.pyomo_path):
            try:
                os.makedirs(self.pyomo_path, mode=0o777, exist_ok=False)
                # makedirs' mode is subject to the umask, so chmod
                # explicitly (the original called chmod twice — once)
                os.chmod(self.pyomo_path, 0o777)
            except Exception as e:
                self.logger.error(e)
        # route pyomo's temporary files into our log directory
        TempfileManager.tempdir = self.pyomo_path

        self.id = id
        self.results = ""

        self.model_path = os.path.abspath(model_path)
        self.solver_name = solver_name
        self.control_frequency = control_frequency
        self.repetition = repetition
        self.horizon_in_steps = horizon_in_steps
        self.dT_in_seconds = dT_in_seconds
        self.output_config = output_config
        self.input_config_parser = input_config_parser
        self.stopRequest = None  # a threading.Event() is not needed here
        self.redisDB = RedisDB()
        self.lock_key = "id_lock"
        self.optimization_type = optimization_type
        self.stop_signal_key = "opt_stop_" + self.id
        self.finish_status_key = "finish_status_" + self.id
        self.redisDB.set(self.stop_signal_key, False)
        self.redisDB.set(self.finish_status_key, False)
        self.repetition_completed = False
        self.preprocess = False
        self.input = None
        self.output = None
        self.solver_ipopt_max_iteration = config.getint("SolverSection", "solver.ipopt.max.iteration", fallback=1000)
        self.solver_ipopt_timeout = config.getint("SolverSection", "solver.ipopt.timeout", fallback=120)
        self.solver_gurobi_max_iteration = config.getint("SolverSection", "solver.gurobi.max.iteration", fallback=1000)
        self.solver_gurobi_timeout = config.getint("SolverSection", "solver.gurobi.timeout", fallback=3)
        # NOTE(review): assumes redisDB.get returns a string; a missing
        # key (None) would raise TypeError here — confirm the key is
        # always written before this runs.
        if "False" in self.redisDB.get("Error mqtt" + self.id):
            self.output = OutputController(self.id, self.output_config)
        if "False" in self.redisDB.get("Error mqtt" + self.id):
            self.input = InputController(self.id, self.input_config_parser, config, self.control_frequency,
                                         self.horizon_in_steps, self.dT_in_seconds)
        self.monitor = MonitorPub(config, id)
 def __init__(self, internal, topic_params, config, generic_name, id,
              event_callback):
     """Receiver for ``generic_name`` that forwards events to ``event_callback``.

     A failure in the base-class init is flagged in redis under
     ``"Error mqtt" + id``.
     """
     redisDB = RedisDB()
     # Assign the id up-front: the except-branch previously read
     # ``self.id``, which is only set by the base class and raised
     # AttributeError when super().__init__ failed early.
     self.id = id
     self.logger = MessageLogger.get_logger(__name__, id)
     self.generic_name = generic_name
     self.event_callback = event_callback
     try:
         super().__init__(internal, topic_params, config, id=id)
     except Exception as e:
         redisDB.set("Error mqtt" + id, True)
         self.logger.error(e)
def framework_stop(id):  # noqa: E501
    """Command for stopping the framework

     # noqa: E501

    :param id: Id of the registry to be stopped
    :type id: str

    :rtype: None
    """
    try:
        redis_db = RedisDB()
        flag = redis_db.get("run:" + id)
        logger.debug("Flag " + str(flag))
        message = ""
        code = 200
        if flag is not None and flag == "running":
            logger.debug("System running and trying to stop")
            redis_db.set("run:" + id, "stop")
            time.sleep(1)
            flag = redis_db.get("run:" + id)
            logger.debug("Flag in stop: " + str(flag))

            # BUG FIX: the original ``flag is "stopped" or None`` compared
            # identity against a string literal and the ``or None`` branch
            # was always falsy, so this condition never matched reliably.
            if flag == "stopped" or flag is None:
                logger.debug("System stopped succesfully")
                message = "System stopped succesfully"
            elif "stopping" in flag:
                message = "System stopped succesfully"
                # poll for up to ~15 seconds while shutdown completes;
                # guard against the key disappearing (flag becoming None)
                counter = 0
                while flag is not None and "stopping" in flag:
                    flag = redis_db.get("run:" + id)
                    counter = counter + 1
                    if counter >= 15:
                        message = "system stopped succesfully"
                        break
                    else:
                        time.sleep(1)
                logger.debug("System stopped succesfully")
            else:
                message = "Problems while stopping the system"
                code = 500
        elif flag is not None and flag == "stopped":
            logger.debug("System already stopped")
            message = "System already stopped"
        elif flag is None:
            logger.debug("System already stopped")
            message = "System already stopped"
    except Exception as e:
        logger.error(e)
        message = "Error stoping the system"
        code = 500
    return message, code
 def __init__(self, id=None, output_config=None):
     """Create the output handler and, when configured, wire up MQTT."""
     self.logger = MessageLogger.get_logger(__name__, id)
     self.logger.info("Output Class started")
     self.id = id
     self.output_config = output_config
     self.mqtt = {}
     self.mqtt_params = {}
     self.output_mqtt = {}
     self.redisDB = RedisDB()
     self.config_parser_utils = ConfigParserUtils()
     self.logger.debug("output_config: " + str(self.output_config) + " " + str(type(self.output_config)))
     # without an output config there is nothing to connect
     if self.output_config is None:
         return
     self.extract_mqtt_params()
     self.init_mqtt()
 def __init__(self, internal_topic_params, config, id, control_frequency, horizon_in_steps, dT_in_seconds, q):
     """Set up the "P_PV" topic publisher fed from the queue ``q``."""
     self.logger = MessageLogger.get_logger(__name__, id)
     self.id = id
     self.q = q
     self.topic = "P_PV"
     self.pv_data = {}
     self.control_frequency = control_frequency
     self.horizon_in_steps = horizon_in_steps
     self.dT_in_seconds = dT_in_seconds
     self.redisDB = RedisDB()
     try:
         super().__init__(True, internal_topic_params, config, control_frequency, id)
     except Exception as e:
         # flag the MQTT failure so other components can react
         self.redisDB.set("Error mqtt" + self.id, True)
         self.logger.error(e)
Esempio n. 13
0
 def __init__(self, model_name, control_frequency, horizon_in_steps,
              dT_in_seconds, repetition, solver, id, optimization_type,
              single_ev):
     """Store the optimization run parameters for later use."""
     self.logger = MessageLogger.get_logger(__name__, id)
     self.id = id
     self.model_name = model_name
     self.solver = solver
     self.optimization_type = optimization_type
     self.single_ev = single_ev
     self.control_frequency = control_frequency
     self.horizon_in_steps = horizon_in_steps
     self.dT_in_seconds = dT_in_seconds
     self.repetition = repetition
     self.redisDB = RedisDB()
     # no MIP server until one is started
     self.pyro_mip_server = None
    def __init__(self, internal, topic_params, config, generic_name, id, buffer, dT, base_value_flag):
        """Buffered receiver for ``generic_name`` with optional persistence.

        Topics marked ``reuseable`` persist their data under the configured
        path and restore it on start-up; ``detachable`` topics additionally
        track a value_used_once flag.
        """
        self.id = id
        self.redisDB = RedisDB()
        self.logger = MessageLogger.get_logger(__name__, id)
        self.generic_name = generic_name
        self.buffer = buffer
        self.dT = dT
        self.base_value_flag = base_value_flag
        self.set_data_update(False)

        # file used to persist/restore values for reuseable topics
        persist_dir = os.path.join(
            "/usr/src/app",
            config.get("IO", "persist.real.data.path", fallback="optimization/resources"),
            id, "real")
        self.persist_real_data_file = os.path.join(persist_dir, generic_name + ".txt")

        self.detachable = topic_params.get("detachable", False)
        if self.detachable:
            self.value_used_once = False
        self.reuseable = topic_params.get("reuseable", False)
        if self.reuseable and not os.path.exists(persist_dir):
            os.makedirs(persist_dir)

        midnight = datetime.datetime.now().replace(hour=0, minute=0, second=0, microsecond=0)
        self.start_of_day = midnight.timestamp()
        self.total_steps_in_day = floor(24 * 60 * 60 / self.dT)
        self.current_day_index = 0
        self.number_of_bucket_days = int(buffer / self.total_steps_in_day)
        self.bucket_index = False
        self.length = 1

        try:
            super().__init__(internal, topic_params, config, id=id)
        except Exception as e:
            self.redisDB.set("Error mqtt" + id, True)
            self.logger.error(e)

        if self.reuseable:
            # restore previously persisted data, if any
            restored = self.read_data()
            if restored is not None and len(restored) > 0:
                self.length = len(restored)
                self.data.update(restored)
                self.set_data_update(True)
                self.last_time = time.time()
Esempio n. 15
0
def delete_output(id):  # noqa: E501
    """Deletes the output of the framework

     # noqa: E501

    :param id: Name of the registry to be deleted
    :type id: str

    :rtype: None
    """
    db = RedisDB()
    # every output entry for this id lives under "o:<id>:*"
    keys = db.get_keys_for_pattern("o:" + id + ":*")
    for key in keys or []:
        db.remove(key)
    return "success"
Esempio n. 16
0
 def __init__(self, id=None, output_config=None):
     """Output handler that publishes error-calculation results."""
     self.logger = MessageLogger.get_logger(__name__, id)
     self.logger.info("Output Class started")
     self.id = id
     self.output_config = output_config
     self.mqtt = {}
     self.mqtt_params = {}
     self.output_mqtt = {}
     self.redisDB = RedisDB()
     self.logger.debug("output_config: " + str(self.output_config) + " " +
                       str(type(self.output_config)))
     # without an output config there is nothing to connect
     if self.output_config is None:
         return
     self.mqtt_params = ConfigParserUtils.extract_mqtt_params_output(
         self.output_config, "error_calculation", False)
     self.logger.debug("params = " + str(self.mqtt_params))
     self.init_mqtt()
 def __init__(self, internal_topic_params, config, queue, publish_frequency, topic, id, horizon_in_steps,
              dT_in_seconds):
     """Set up the load-data publisher with its queue and topic parameters."""
     self.logger = MessageLogger.get_logger(__name__, id)
     self.id = id
     self.q = queue
     self.topic = topic
     self.flag = True
     self.load_data = {}
     self.horizon_in_steps = horizon_in_steps
     self.dT_in_seconds = dT_in_seconds
     self.file_path = os.path.join("/usr/src/app", "optimization", "loadData.dat")
     self.redisDB = RedisDB()
     try:
         super().__init__(True, internal_topic_params, config, publish_frequency, id)
     except Exception as e:
         # flag the MQTT failure so other components can react
         self.redisDB.set("Error mqtt" + self.id, True)
         self.logger.error(e)
Esempio n. 18
0
    def __init__(self, id, solver_name, model_path, control_frequency,
                 repetition, output_config, input_config_parser, config,
                 horizon_in_steps, dT_in_seconds, optimization_type):
        super().__init__()

        pyomo_path = "/usr/src/app/logs/pyomo_" + str(id)
        if not os.path.exists(pyomo_path):
            os.makedirs(pyomo_path, mode=0o777, exist_ok=False)
            os.chmod(pyomo_path, 0o777)
        TempfileManager.tempdir = pyomo_path

        self.logger = MessageLogger.get_logger(__name__, id)
        self.logger.info("Initializing optimization controller " + id)
        self.id = id
        self.results = ""
        self.model_path = model_path
        self.solver_name = solver_name
        self.control_frequency = control_frequency
        self.repetition = repetition
        self.horizon_in_steps = horizon_in_steps
        self.dT_in_seconds = dT_in_seconds
        self.output_config = output_config
        self.input_config_parser = input_config_parser
        self.stopRequest = None  #threading.Event()
        self.redisDB = RedisDB()
        self.lock_key = "id_lock"
        self.optimization_type = optimization_type
        self.stop_signal_key = "opt_stop_" + self.id
        self.finish_status_key = "finish_status_" + self.id
        self.redisDB.set(self.stop_signal_key, False)
        self.redisDB.set(self.finish_status_key, False)
        self.repetition_completed = False
        self.preprocess = False
        self.input = None
        self.output = None
        if "False" in self.redisDB.get("Error mqtt" + self.id):
            self.output = OutputController(self.id, self.output_config)
        if "False" in self.redisDB.get("Error mqtt" + self.id):
            self.input = InputController(self.id, self.input_config_parser,
                                         config, self.control_frequency,
                                         self.horizon_in_steps,
                                         self.dT_in_seconds)
        """try:
 def __init__(self, topic_params, config, buffer, training_data_size,
              save_path, topic_name, id):
     """Receive raw training data and persist it to ``save_path``.

     Starts a background thread that periodically saves buffered data.
     """
     self.file_path = save_path
     # Assign the id before super() runs: the except-branch previously
     # read ``self.id``, which was only set further below and raised
     # AttributeError when the base init failed.
     self.id = id
     redisDB = RedisDB()
     try:
         super().__init__(False, topic_params, config, [], id)
     except Exception as e:
         redisDB.set("Error mqtt" + id, True)
         logger.error(e)
     self.buffer_data = []
     self.buffer = buffer
     self.training_data_size = training_data_size
     self.current_minute = None
     self.sum = 0
     self.count = 0
     self.minute_data = []
     self.topic_name = topic_name
     self.load_data()
     # flush buffered data to file periodically in the background
     self.file_save_thread = threading.Thread(target=self.save_to_file_cron)
     self.file_save_thread.start()
Esempio n. 20
0
def get_output(id):  # noqa: E501
    """Get output of the optimization

     # noqa: E501

    :param id: Name of the registry to be actualized
    :type id: str

    :rtype: OptimizationOutput
    """
    result = {}
    redisDB = RedisDB()
    output_keys = redisDB.get_keys_for_pattern("o:" + id + ":*")
    if output_keys is not None:
        meta = redisDB.get("id_meta:" + id)
        if meta is not None:
            meta = json.loads(meta)
            dT = meta["dT_in_seconds"]
            for key in output_keys:
                # key layout: "o:<id>:<topic>:<index>"
                sub_key = key.split(":")
                topic = sub_key[2]
                index = sub_key[3]
                json_value = json.loads(redisDB.get(key))
                # take the first (timestamp, value) pair, if any; renamed
                # from ``time`` to avoid shadowing the time module
                timestamp = None
                value = 0
                for t, v in json_value.items():
                    timestamp = t
                    value = v
                    break
                if topic not in result.keys():
                    result[topic] = {}
                if timestamp is not None:
                    # shift the base timestamp by the step index
                    t = float(timestamp) + int(index) * dT
                    result[topic][t] = float(value)
            logger.debug(result)
    return OptimizationOutput.from_dict(result)
 def __init__(self, control_frequency, horizon_in_steps, num_timesteps, hidden_size, batch_size, num_epochs, raw_data_file, processingData,
              model_file_container, model_file_container_train, topic_name, id, dT_in_seconds, output_size, log):
     """Hold the training configuration and model-file locations."""
     super().__init__()
     self.logger = log
     self.id = id
     self.topic_name = topic_name
     self.control_frequency = control_frequency
     self.horizon_in_steps = horizon_in_steps
     self.dT_in_seconds = dT_in_seconds
     self.num_timesteps = num_timesteps
     self.hidden_size = hidden_size
     self.batch_size = batch_size
     self.num_epochs = num_epochs
     self.output_size = output_size
     self.min_training_size = num_timesteps + output_size + 5
     self.model_file_container = model_file_container
     self.model_file_container_train = model_file_container_train
     self.today = datetime.datetime.now().day
     self.processingData = processingData
     self.trained = False
     self.raw_data_file = raw_data_file
     self.stopRequest = threading.Event()
     self.redisDB = RedisDB()
     self.training_lock_key = "training_lock"
Esempio n. 22
0
    def __init__(self, config, output_config, input_config_parser, id,
                 control_frequency, horizon_in_steps, dT_in_seconds,
                 generic_name):
        """Set up PV prediction for the topic ``generic_name``.

        Builds the raw-data receiver, the PV forecast publisher and the
        error-reporting worker, and starts their background threads.
        """
        super().__init__()
        self.logger = MessageLogger.get_logger(__name__, id)
        self.logger.debug("PV prediction class")
        self.stopRequest = threading.Event()
        self.config = config
        self.q = Queue(maxsize=0)
        self.generic_name = generic_name
        # The original assigned the parameter, halved it, then immediately
        # overwrote it with 60 — only the final value ever took effect, so
        # the two dead assignments were removed.
        self.control_frequency = 60
        self.id = id
        self.horizon_in_steps = horizon_in_steps
        self.dT_in_seconds = dT_in_seconds
        self.old_predictions = {}
        self.output_config = output_config
        self.influxDB = InfluxDBManager()
        # per-id csv files for raw, prediction and error data
        self.raw_data_file_container = os.path.join(
            "/usr/src/app", "prediction/resources", self.id,
            "raw_data_" + str(generic_name) + ".csv")

        self.prediction_data_file_container = os.path.join(
            "/usr/src/app", "prediction/resources", self.id,
            "prediction_data_" + str(generic_name) + ".csv")

        self.error_result_file_path = os.path.join(
            "/usr/src/app", "prediction/resources", self.id,
            "error_data_" + str(generic_name) + ".csv")

        self.redisDB = RedisDB()
        raw_pv_data_topic = input_config_parser.get_params(generic_name)
        opt_values = input_config_parser.get_optimization_values()

        # fall back to Bonn/Germany when the pv meta lacks a location
        city = "Bonn"
        country = "Germany"
        try:
            city = opt_values["City"][None]
            country = opt_values["Country"][None]
        except Exception:
            self.logger.error("City or country not present in pv meta")

        location = {"city": city, "country": country}

        self.maxPV = float(opt_values["PV_Inv_Max_Power"][None])
        pv_forecast_topic = config.get("IO", "forecast.topic")
        pv_forecast_topic = json.loads(pv_forecast_topic)
        pv_forecast_topic[
            "topic"] = pv_forecast_topic["topic"] + self.generic_name

        self.radiation = Radiation(config, self.maxPV, dT_in_seconds, location,
                                   horizon_in_steps)

        self.max_file_size_mins = config.getint("IO",
                                                "pv.raw.data.file.size",
                                                fallback=10800)

        self.copy_prediction_file_data_to_influx()
        from prediction.rawLoadDataReceiver import RawLoadDataReceiver
        self.raw_data = RawLoadDataReceiver(raw_pv_data_topic, config, 1,
                                            self.raw_data_file_container,
                                            generic_name, self.id, False,
                                            self.max_file_size_mins,
                                            self.influxDB)

        self.pv_forecast_pub = PVForecastPublisher(pv_forecast_topic, config,
                                                   id, 60, horizon_in_steps,
                                                   dT_in_seconds, self.q)
        self.pv_forecast_pub.start()

        # periodically persist the prediction data
        self.prediction_save_thread = threading.Thread(
            target=self.save_to_file_cron)
        self.prediction_save_thread.start()

        from prediction.errorReporting import ErrorReporting
        error_topic_params = config.get("IO", "error.topic")
        error_topic_params = json.loads(error_topic_params)
        error_topic_params[
            "topic"] = error_topic_params["topic"] + generic_name
        self.error_reporting = ErrorReporting(
            config, id, generic_name, dT_in_seconds, control_frequency,
            horizon_in_steps, self.prediction_data_file_container,
            self.raw_data_file_container, error_topic_params,
            self.error_result_file_path, self.output_config, self.influxDB)
        self.error_reporting.start()
    def __init__(self, config, horizon_in_steps, topic_name, dT_in_seconds, id,
                 type, opt_values):
        """Set up machine-learning prediction for either "load" or "pv".

        :param type: prediction kind; "load" and "pv" select different
            model dimensions and pre-processing below.
        :param opt_values: optimization meta values; for "pv" the City and
            Country entries (when present) locate the radiation data.
        """
        super(MachineLearning, self).__init__()
        self.logger = MessageLogger.get_logger(__name__, id)

        self.horizon_in_steps = horizon_in_steps
        self.topic_name = topic_name
        self.dT_in_seconds = dT_in_seconds
        self.id = id
        # NOTE: ``type`` shadows the builtin; kept for interface stability
        self.type = type

        self.redisDB = RedisDB()
        self.influxDB = InfluxDBManager()

        if self.type == "load":
            # model dimensions for the load case
            self.model_data_dT = 60
            self.input_size = 1440
            self.hidden_size = 100
            self.batch_size = 1
            self.num_epochs = 10
            self.output_size = 1440
            self.processingData = ProcessingData(type)
            self.model_file_container_base = os.path.join(
                "/usr/src/app/prediction/model", "model_base.h5")
        elif self.type == "pv":
            # model dimensions for the pv case
            self.model_data_dT = 60
            self.input_size = 1
            self.input_size_hist = 24
            self.hidden_size = 100
            self.batch_size = 1
            self.num_epochs = 10
            self.output_size = 1440
            # fall back to Bonn/Germany when location is absent in the meta
            city = "Bonn"
            country = "Germany"
            self.logger.info("opt va " + str(opt_values))
            try:
                if "City" in opt_values.keys(
                ) and "Country" in opt_values.keys():
                    # take the first entry of each mapping
                    for k, v in opt_values["City"].items():
                        city = v
                        break
                    for k, v in opt_values["Country"].items():
                        country = v
                        break
                else:
                    self.logger.error("City or country not present in pv meta")
            except Exception:
                self.logger.error("City or country not present in pv meta")

            location = {"city": city, "country": country}

            radiation = Radiation(config, 1, dT_in_seconds, location,
                                  horizon_in_steps)
            # historical radiation data feeds the pv pre-processing
            hist_data = radiation.get_complete_data()
            self.processingData = ProcessingData(type, hist_data)
            self.model_file_container_base = os.path.join(
                "/usr/src/app/prediction/model", "model_base_pv.h5")

        # per-id resource directory, created on first use
        base_path = "/usr/src/app/prediction/resources"
        dir_data = os.path.join(base_path, self.id)
        if not os.path.exists(dir_data):
            os.makedirs(dir_data)

        # per-id file locations for raw data and the model snapshots
        self.raw_data_file_container = os.path.join(
            base_path, self.id, "raw_data_" + str(topic_name) + ".csv")
        self.model_file_container = os.path.join(
            base_path, self.id, "model_" + str(topic_name) + ".h5")
        self.model_file_container_temp = os.path.join(
            base_path, self.id, "model_temp_" + str(topic_name) + ".h5")
        self.model_file_container_train = os.path.join(
            base_path, self.id, "model_train_" + str(topic_name) + ".h5")

        # workers are created lazily elsewhere; start with none
        self.forecast_pub = None
        self.prediction_thread = None
        self.training_thread = None
        self.raw_data = None
        self.models = Models(self.model_file_container,
                             self.model_file_container_temp,
                             self.model_file_container_base)
Esempio n. 24
0
Created on Oct 04 12:03 2018

@author: nishit
"""
import configparser
import json

import time

from IO.redisDB import RedisDB
from config.configUpdater import ConfigUpdater
from prediction.loadPrediction import LoadPrediction

from utils_intern.messageLogger import MessageLogger

# Module-level redis handle shared by the training watcher below.
redisDB = RedisDB()

# Running training threads, keyed by the redis "train:*" key (see check_training).
training_threads = {}


def check_training(config, logger):
    while True:
        keys = redisDB.get_keys_for_pattern("train:*")
        if keys is not None:
            keys_union = set(training_threads.keys()).union(keys)
            for key in keys_union:
                if key not in training_threads.keys() and key in keys:
                    sub_keys = key.split(":")
                    id = sub_keys[1]
                    prediction_name = sub_keys[2]
                    value = redisDB.get(key)
def framework_start(id, startOFW):  # noqa: E501
    """Command for starting the framework

     # noqa: E501

    :param id: Id of the registry to be started
    :type id: str
    :param startOFW: Start command for the optimization framework   repetitions: -1 infinite repetitions
    :type startOFW: dict | bytes

    :rtype: None
    """

    available_solvers = ["ipopt", "glpk", "bonmin", "gurobi", "cbc"]
    available_optimizers = ["discrete", "stochastic", "MPC"]
    response_msg = ""
    response_code = 200
    if connexion.request.is_json:
        logger.info("Starting the system")
        startOFW = Start.from_dict(connexion.request.get_json())
        models = get_models()
        # validate model, solver and optimizer type before starting
        if startOFW.model_name != "" and startOFW.model_name not in models:
            response_msg = "Model not available. Available models are :" + str(
                models)
            response_code = 400
        elif startOFW.solver not in available_solvers:
            response_msg = "Use one of the following solvers :" + str(
                available_solvers)
            response_code = 400
        elif startOFW.optimization_type not in available_optimizers:
            response_msg = "Use one of the following optimizer types : " + str(
                available_optimizers)
            response_code = 400
        else:
            # renamed from ``dir`` to avoid shadowing the builtin
            registry_dir = os.path.join(os.getcwd(),
                                        "optimization/resources", str(id))
            if not os.path.exists(registry_dir):
                response_msg = "Id not existing"
                response_code = 400
            else:
                redis_db = RedisDB()
                flag = redis_db.get("run:" + id)
                if flag is not None and flag == "running":
                    response_msg = "System already running"
                else:
                    try:
                        msg = variable.start(id, startOFW)
                        if msg == 0:
                            response_msg = "System started succesfully"
                        else:
                            response_msg = "System could not start"
                            response_code = 400
                    except (InvalidModelException, MissingKeysException,
                            InvalidMQTTHostException) as e:
                        logger.error("Error " + str(e))
                        # mark the run as stopped so a retry is possible
                        redis_db.set("run:" + id, "stopped")
                        response_msg = str(e)
                        response_code = 400
    else:
        response_msg = "Wrong Content-Type"
        response_code = 400
        logger.error("Wrong Content-Type")
    return response_msg, response_code
 def __init__(self):
     """Initialize the per-id factory and status bookkeeping."""
     self.redisDB = RedisDB()
     self.lock_key = "id_lock"
     self.factory = {}
     self.statusThread = {}
     self.running = {}