Beispiel #1
0
def clear_redis(logger):
    """Reset the Redis store and record the time of the reset.

    :param logger: logger used to announce the reset.
    :return: the fresh RedisDB handle after the reset.
    """
    from IO.redisDB import RedisDB

    logger.info("reset redis")
    db = RedisDB()
    db.reset()
    # remember when the store was last wiped
    db.set("time", time.time())
    return db
Beispiel #2
0
 def __init__(self, internal, topic_params, config, generic_name, id,
              buffer, dT, base_value_flag):
     """Set up the generic receiver state and connect via the parent class.

     MQTT connection failures are flagged in redis under "Error mqtt<id>".
     """
     redis_db = RedisDB()
     self.logger = MessageLogger.get_logger(__name__, id)
     self.generic_name = generic_name
     self.buffer = buffer
     self.dT = dT
     self.base_value_flag = base_value_flag
     # optional topic flags; absent keys default to False
     self.detachable = topic_params["detachable"] if "detachable" in topic_params else False
     self.reuseable = topic_params["reuseable"] if "reuseable" in topic_params else False
     # midnight of the current day as a unix timestamp
     midnight = datetime.datetime.now().replace(
         hour=0, minute=0, second=0, microsecond=0)
     self.start_of_day = midnight.timestamp()
     self.total_steps_in_day = floor(24 * 60 * 60 / self.dT)
     self.current_day_index = 0
     self.number_of_bucket_days = int(buffer / self.total_steps_in_day)
     self.bucket_index = False
     self.length = 1
     try:
         super().__init__(internal, topic_params, config, id=id)
     except Exception as exc:
         redis_db.set("Error mqtt" + self.id, True)
         self.logger.error(exc)
 def __init__(self, topic_params, config, buffer, save_path, topic_name, id,
              load_file_data, max_file_size_mins, influxDB):
     """Raw-data storage: subscribes via the parent receiver, buffers
     incoming samples and periodically persists them via a cron thread.

     :param load_file_data: when True, previously saved file data is loaded.
     :param max_file_size_mins: cap (in minutes) for the raw data file.
     """
     self.file_path = save_path
     # BUG FIX: bind id and a logger *before* the try block -- the original
     # referenced self.id (assigned only later) and an undefined module
     # name `logger` inside the except handler.
     self.id = id
     self.logger = MessageLogger.get_logger(__name__, id)
     redisDB = RedisDB()
     try:
         super().__init__(False, topic_params, config, [], id)
     except Exception as e:
         redisDB.set("Error mqtt" + self.id, True)
         self.logger.error(e)
     self.influxDB = influxDB
     self.buffer_data = []
     self.buffer = buffer
     self.current_minute = None
     self.sum = 0
     self.count = 0
     self.minute_data = []
     self.topic_name = topic_name
     self.max_file_size_mins = max_file_size_mins
     # how often (seconds) the cron thread flushes raw data to file
     self.save_cron_freq = config.getint("IO",
                                         "raw.data.file.save.frequency.sec",
                                         fallback=3600)
     self.copy_file_data_to_influx()
     if load_file_data:
         self.load_data()
     self.file_save_thread = threading.Thread(target=self.save_to_file_cron,
                                              args=(self.save_cron_freq, ))
     self.file_save_thread.start()
Beispiel #4
0
 def __init__(self, config, id, topic_name, dT_in_seconds,
              control_frequency, horizon_in_steps,
              prediction_data_file_container, raw_data_file_container,
              topic_params, error_result_file_path, output_config,
              influxDB):
     """Prediction controller setup: stores configuration, prepares the
     raw data reader and connects to MQTT through the parent class."""
     self.logger = MessageLogger.get_logger(__name__, id)
     self.id = id
     self.topic_name = topic_name
     self.topic_params = topic_params
     self.control_frequency = control_frequency
     self.horizon_in_steps = horizon_in_steps
     self.dT_in_seconds = dT_in_seconds
     self.raw_data_file_container = raw_data_file_container
     self.prediction_data_file_container = prediction_data_file_container
     self.error_result_file_path = error_result_file_path
     self.output_config = output_config
     self.influxDB = influxDB
     self.raw_data = RawDataReader()
     self.stopRequest = threading.Event()
     redis_db = RedisDB()
     try:
         # internal=False when the topic params were updated successfully,
         # internal=True otherwise (same branch logic as the original
         # if/else, collapsed into one call)
         internal = not self.update_topic_params()
         super().__init__(internal, self.topic_params, config,
                          control_frequency, id)
     except Exception as exc:
         redis_db.set("Error mqtt" + self.id, True)
         self.logger.error(exc)
 def exit_gracefully(self, signum, frame):
     """Signal handler: announce shutdown, flag "End ofw" in redis so other
     threads can stop, then mark this process for termination."""
     from IO.redisDB import RedisDB

     print("\nReceived {} signal".format(self.signals[signum]))
     print("Cleaning up resources. End of the program")
     shutdown_db = RedisDB()
     shutdown_db.set("End ofw", "True")
     # brief grace period so other threads can observe the flag
     time.sleep(6)
     self.kill_now = True
class PVForecastPublisher(DataPublisher):
    """Publishes PV forecast data pulled from a queue as senml JSON on the
    "P_PV" topic."""

    def __init__(self, internal_topic_params, config, id, control_frequency, horizon_in_steps, dT_in_seconds, q):
        """Store forecast settings, keep the queue handle and connect via
        the parent publisher; MQTT failures are flagged in redis."""
        self.logger = MessageLogger.get_logger(__name__, id)
        self.redisDB = RedisDB()
        self.id = id
        self.topic = "P_PV"
        self.pv_data = {}
        self.q = q
        self.control_frequency = control_frequency
        self.horizon_in_steps = horizon_in_steps
        self.dT_in_seconds = dT_in_seconds
        try:
            super().__init__(True, internal_topic_params, config, control_frequency, id)
        except Exception as exc:
            self.redisDB.set("Error mqtt" + self.id, True)
            self.logger.error(exc)

    def get_data(self):
        """Return the next forecast as senml JSON, or None when publishing
        is disabled or no fresh data is queued."""
        # skip publishing until the data-flow flag for this id is set
        if not self.redisDB.get_bool(Constants.get_data_flow_key(self.id)):
            return None
        self.logger.debug("Getting PV data from Queue")
        if self.q.empty():
            self.logger.debug("PV Queue empty")
            return None
        try:
            fresh = self.q.get_nowait()
            self.logger.debug("new data "+str(fresh))
            self.q.task_done()
            self.pv_data = fresh
            self.logger.debug("extract pv data")
            return self.convert_to_senml()
        except Exception:
            self.logger.error("Queue empty")

    def convert_to_senml(self):
        """Serialize the buffered pv_data rows into a senml JSON string."""
        measurements = [self.get_senml_meas(float(entry[1]), entry[0])
                        for entry in self.pv_data]
        document = senml.SenMLDocument(measurements)
        return json.dumps(document.to_json())

    def get_senml_meas(self, value, time):
        """Build one senml measurement on the publisher's topic; datetime
        times are converted to unix timestamps."""
        if not isinstance(time, float):
            time = float(time.timestamp())
        measurement = senml.SenMLMeasurement()
        measurement.name = self.topic
        measurement.time = time
        measurement.value = value
        return measurement
 def __init__(self, internal, topic_params, config, generic_name, id,
              event_callback):
     """Event receiver setup: remember the callback and connect through the
     parent class; MQTT failures are flagged in redis."""
     self.logger = MessageLogger.get_logger(__name__, id)
     self.generic_name = generic_name
     self.event_callback = event_callback
     redis_db = RedisDB()
     try:
         super().__init__(internal, topic_params, config, id=id)
     except Exception as exc:
         redis_db.set("Error mqtt" + self.id, True)
         self.logger.error(exc)
def framework_stop(id):  # noqa: E501
    """Command for stopping the framework

     # noqa: E501

    :param id: Id of the registry to be stopped
    :type id: str

    :rtype: None
    """
    try:
        redis_db = RedisDB()
        flag = redis_db.get("run:" + id)
        logger.debug("Flag " + str(flag))
        message = ""
        code = 200
        if flag is not None and flag == "running":
            logger.debug("System running and trying to stop")
            redis_db.set("run:" + id, "stop")
            time.sleep(1)
            flag = redis_db.get("run:" + id)
            logger.debug("Flag in stop: " + str(flag))

            # BUG FIX: the original condition was
            # `flag is "stopped" or None`, which parses as
            # `(flag is "stopped") or None` -- an identity check against a
            # string literal or-ed with None, so it was effectively never
            # true. Compare by value and treat a missing flag as stopped.
            if flag is None or flag == "stopped":
                logger.debug("System stopped succesfully")
                message = "System stopped succesfully"
            elif "stopping" in flag:
                message = "System stopped succesfully"
                counter = 0
                # poll until the stopping state clears (max ~15s); also
                # guard against the flag disappearing from redis mid-loop,
                # which would make `"stopping" in None` raise
                while flag is not None and "stopping" in flag:
                    flag = redis_db.get("run:" + id)
                    counter = counter + 1
                    if counter >= 15:
                        message = "system stopped succesfully"
                        break
                    else:
                        time.sleep(1)
                logger.debug("System stopped succesfully")
            else:
                message = "Problems while stopping the system"
                code = 500
        elif flag is not None and flag == "stopped":
            logger.debug("System already stopped")
            message = "System already stopped"
        elif flag is None:
            logger.debug("System already stopped")
            message = "System already stopped"
    except Exception as e:
        logger.error(e)
        # NOTE: response message typos kept as-is for API compatibility
        message = "Error stoping the system"
        code = 500
    return message, code
Beispiel #9
0
 def __init__(self, internal_topic_params, config, id, control_frequency,
              horizon_in_steps, dT_in_seconds, q):
     """PV forecast publisher setup: keep the forecast queue and connect
     through the parent publisher; MQTT failures are flagged in redis."""
     self.logger = MessageLogger.get_logger(__name__, id)
     self.topic = "P_PV"
     self.pv_data = {}
     self.q = q
     self.control_frequency = control_frequency
     self.horizon_in_steps = horizon_in_steps
     self.dT_in_seconds = dT_in_seconds
     try:
         super().__init__(True, internal_topic_params, config,
                          control_frequency, id)
     except Exception as exc:
         # a redis handle is only needed on the failure path
         RedisDB().set("Error mqtt" + self.id, True)
         self.logger.error(exc)
 def __init__(self, internal_topic_params, config, queue, publish_frequency,
              topic, id, horizon_in_steps, dT_in_seconds):
     """Load-data publisher setup: remember queue/topic settings and
     connect through the parent publisher; MQTT failures are flagged in
     redis."""
     self.logger = MessageLogger.get_logger(__name__, id)
     self.q = queue
     self.topic = topic
     self.load_data = {}
     self.flag = True
     self.horizon_in_steps = horizon_in_steps
     self.dT_in_seconds = dT_in_seconds
     # fixed location of the load data file inside the container
     self.file_path = os.path.join("/usr/src/app", "optimization",
                                   "loadData.dat")
     try:
         super().__init__(True, internal_topic_params, config,
                          publish_frequency, id)
     except Exception as exc:
         # a redis handle is only needed on the failure path
         RedisDB().set("Error mqtt" + self.id, True)
         self.logger.error(exc)
 def __init__(self, topic_params, config, buffer, training_data_size,
              save_path, topic_name, id):
     """Training raw-data storage: subscribes via the parent receiver,
     buffers samples, loads persisted data and starts the save thread.

     :param training_data_size: number of samples kept for training.
     """
     self.file_path = save_path
     # BUG FIX: bind id and a logger *before* the try block -- the original
     # referenced self.id (assigned only later) and an undefined module
     # name `logger` inside the except handler.
     self.id = id
     self.logger = MessageLogger.get_logger(__name__, id)
     redisDB = RedisDB()
     try:
         super().__init__(False, topic_params, config, [], id)
     except Exception as e:
         redisDB.set("Error mqtt" + self.id, True)
         self.logger.error(e)
     self.buffer_data = []
     self.buffer = buffer
     self.training_data_size = training_data_size
     self.current_minute = None
     self.sum = 0
     self.count = 0
     self.minute_data = []
     self.topic_name = topic_name
     self.load_data()
     self.file_save_thread = threading.Thread(target=self.save_to_file_cron)
     self.file_save_thread.start()
class BaseDataReceiver(DataReceiver, ABC):
    """Receives senml measurements over MQTT, resamples them into fixed
    dT-second buckets spread over number_of_bucket_days days, and serves
    bucket-aligned slices of the buffered data.

    Bucket keys are strings "<day_index>_<bucket>", where bucket is the
    dT-step number since local midnight and day_index cycles through
    number_of_bucket_days.
    """

    def __init__(self, internal, topic_params, config, generic_name, id, buffer, dT, base_value_flag):
        self.id = id
        self.redisDB = RedisDB()
        self.logger = MessageLogger.get_logger(__name__, id)
        self.generic_name = generic_name
        self.buffer = buffer
        self.dT = dT
        self.base_value_flag = base_value_flag
        self.set_data_update(False)
        # BUG FIX: default last_time so the detachable branch of
        # get_bucket_aligned_data can read it before any message arrived
        # (the original risked AttributeError there).
        # NOTE(review): confirm the parent class does not set last_time.
        self.last_time = 0

        persist_real_data_path = config.get("IO","persist.real.data.path",
                                                 fallback="optimization/resources")
        persist_real_data_path = os.path.join("/usr/src/app", persist_real_data_path, id, "real")
        self.persist_real_data_file = os.path.join(persist_real_data_path, generic_name+".txt")

        # "detachable" values may only be consumed once; "reuseable" values
        # are persisted to file and reloaded on restart
        if "detachable" in topic_params.keys():
            self.detachable = topic_params["detachable"]
        else:
            self.detachable = False
        if self.detachable:
            self.value_used_once = False
        if "reuseable" in topic_params.keys():
            self.reuseable = topic_params["reuseable"]
        else:
            self.reuseable = False
        if self.reuseable and not os.path.exists(persist_real_data_path):
            os.makedirs(persist_real_data_path)

        self.start_of_day = datetime.datetime.now().replace(hour=0, minute=0, second=0, microsecond=0).timestamp()
        self.total_steps_in_day = floor(24 * 60 * 60 / self.dT)
        self.current_day_index = 0
        self.number_of_bucket_days = int(buffer / self.total_steps_in_day)
        self.bucket_index = False
        self.length = 1

        try:
            super(BaseDataReceiver, self).__init__(internal, topic_params, config, id=id)
        except Exception as e:
            self.redisDB.set("Error mqtt" + id, True)
            self.logger.error(e)

        if self.reuseable:
            formated_data = self.read_data()
            if formated_data is not None and len(formated_data) > 0:
                self.length = len(formated_data)
                self.data.update(formated_data)
                self.set_data_update(True)
                self.last_time = time.time()

    def on_msg_received(self, payload):
        """MQTT callback: parse a senml payload into bucketed data and mark
        the data as updated."""
        try:
            # refresh start_of_day so buckets stay aligned to the current day
            self.start_of_day = datetime.datetime.now().replace(hour=0, minute=0, second=0, microsecond=0).timestamp()
            if "chargers" in payload:
                self.logger.debug("data received for charger = "+str(payload))
            senml_data = json.loads(payload)
            formated_data = self.add_formated_data(senml_data)
            if self.reuseable:
                self.save_data(formated_data)
            self.data.update(formated_data)
            self.set_data_update(True)
            self.last_time = time.time()
        except Exception as e:
            self.logger.error(e)

    def save_data(self, formated_data):
        """Persist reuseable real values to file, one value per line."""
        keys = list(formated_data.keys())
        # NOTE(review): the original called sorted(keys) and discarded the
        # result, so values are written in insertion (chronological) order.
        # A lexicographic sort of "day_bucket" keys would misorder buckets
        # (e.g. "0_10" < "0_2"), so the dead call was removed rather than
        # applied.
        values = [formated_data[key] for key in keys]
        with open(self.persist_real_data_file, "w") as f:
            value = "\n".join(map(str, values))
            f.writelines(value)
            self.logger.debug("saved real reuseable data to file "+self.persist_real_data_file)

    def read_data(self):
        """Load persisted reuseable values back into bucket keys; returns
        None when no persisted file exists."""
        if os.path.exists(self.persist_real_data_file):
            with open(self.persist_real_data_file, "r") as f:
                data = f.readlines()
                formated_data = {}
                bucket = 0
                for row in data:
                    bucket_key = str(self.current_day_index) + "_" + str(bucket)
                    formated_data[bucket_key] = float(row)
                    bucket += 1
                    # wrap to the next day once a full day of buckets is read
                    if bucket >= self.total_steps_in_day:
                        bucket = 0
                        self.current_day_index += 1
                        if self.current_day_index >= self.number_of_bucket_days:
                            self.current_day_index = 0
                return formated_data
        return None


    def add_formated_data(self, json_data):
        """Convert a senml document into {bucket_key: value}, resampled to
        dT-second steps; returns {} when the payload cannot be parsed."""
        doc = None
        try:
            doc = senml.SenMLDocument.from_json(json_data)
        except Exception:
            pass
        if not doc:
            # fall back to a single-measurement payload
            try:
                meas = senml.SenMLMeasurement.from_json(json_data)
                doc = senml.SenMLDocument([meas])
            except Exception:
                pass

        if doc:
            base_data = doc.base
            bn, bu = None, None
            if base_data:
                bn = base_data.name
                bu = base_data.unit
            data = {}
            raw_data = []
            doc.measurements = sorted(doc.measurements, key=lambda x: x.time)
            if len(doc.measurements) > 0:
                for meas in doc.measurements:
                    n = meas.name
                    u = meas.unit
                    v = meas.value
                    t = meas.time
                    t = self.time_conversion(t)
                    if not u:
                        u = bu
                    # dont check bn
                    if not n:
                        n = self.generic_name
                    try:
                        processed_value = self.preprocess_data(bn, n, v, u)
                        # BUG FIX: the original tested
                        # `processed_value is not {}`, an identity check
                        # against a fresh dict that is always True; compare
                        # by value so empty dicts are actually skipped.
                        if processed_value is not None and processed_value != {}:
                            raw_data.append([t, processed_value])
                    except Exception as e:
                        self.logger.error("error " + str(e) + "  n = " + str(n))
                raw_data = TimeSeries.expand_and_resample(raw_data, self.dT, True)
                if len(raw_data) > 0:
                    self.length = len(raw_data)
                    bucket = self.time_to_bucket(raw_data[0][0])
                    for row in raw_data:
                        bucket_key = str(self.current_day_index) + "_" + str(bucket)
                        bucket += 1
                        if bucket >= self.total_steps_in_day:
                            bucket = 0
                            self.current_day_index += 1
                            if self.current_day_index >= self.number_of_bucket_days:
                                self.current_day_index = 0
                        data[bucket_key] = row[1]
            return data
        return {}

    @abstractmethod
    def preprocess_data(self, base, name, value, unit):
        """Hook for subclasses: transform one measurement value; return
        None (or an empty dict) to drop it."""
        return value

    def iterative_init(self, d, v):
        """Build a nested dict from the path segments in v with d as the
        innermost value, e.g. ["a", "b"] -> {"a": {"b": d}}."""
        if len(v) <= 0:
            return d
        d = self.iterative_init({v[-1]: d}, v[:-1])
        return d

    def get_bucket_aligned_data(self, bucket, steps, wait_for_data=True, check_bucket_change=True):
        """Return up to `steps` consecutive bucket values starting at
        `bucket`, as ({name: {index: value}}, bucket_available, last_time).

        The day index is searched newest-first; for detachable topics the
        data may only be consumed once.
        """
        bucket_requested = bucket
        self.logger.info("Get "+str(self.generic_name)+" data for bucket = "+str(bucket_requested))
        bucket_available = True
        if self.base_value_flag:
            final_data = self.iterative_init({}, self.generic_name.split("/"))
        else:
            final_data = {self.generic_name: {}}

        #TODO: figure out every use case
        if self.detachable and self.reuseable:
            data = self.get_data(require_updated=2)
        elif self.detachable and not self.value_used_once:
            data = self.get_data(require_updated=2)
        elif self.detachable:
            data = self.get_data(require_updated=2, clearData=True)
        elif self.reuseable:
            data = self.get_data(require_updated=1)
        elif wait_for_data:
            data = self.get_data(require_updated=0)
        else:
            data = self.get_data(require_updated=1)

        if self.redisDB.get("End ofw") != "True":
            self.logger.debug(str(self.generic_name) + " data from mqtt is : "+ json.dumps(data, indent=4))
            self.logger.debug(str(self.generic_name) + " steps: "+str(steps) + " length: "+str(self.length))
            if steps > self.length:
                steps = self.length
            day = None
            self.logger.debug(str(self.generic_name) + " steps: " + str(steps))
            if len(data) >= steps:
                # newest-first search for a day index that holds the bucket
                for i in reversed(range(self.number_of_bucket_days+1)):
                    key = str(i) + "_" + str(bucket)
                    self.logger.debug("key in data: "+str(key)+" for "+str(self.generic_name))
                    if key in data.keys():
                        day = str(i)
                        break
                if day is None and self.detachable and not self.value_used_once and self.last_time > 0:
                    self.logger.debug("Day set to 0 for detachable for " + str(self.generic_name))
                    day = "0"
                if day is None and self.detachable:
                    self.logger.debug("Ignoring day for detachable for " + str(self.generic_name))
                    pass
                elif day is None:
                    bucket_available = False
                    self.logger.debug("Setting bucket available to False. Day is None for " + str(self.generic_name))
                else:
                    new_data = {}
                    index = 0
                    # collect `steps` values, skipping missing buckets and
                    # wrapping day/bucket at day boundaries
                    while len(new_data) < steps:
                        bucket_key = day + "_" + str(bucket)
                        if bucket_key in data.keys():
                            new_data[index] = data[bucket_key]
                            index += 1
                        bucket += 1
                        if bucket >= self.total_steps_in_day:
                            bucket = 0
                            day_i = int(day) + 1
                            if day_i >= self.number_of_bucket_days:
                                day_i = 0
                            day = str(day_i)
                    self.logger.debug("base_value_flag "+str(self.base_value_flag)+" for "+str(self.generic_name))
                    if self.base_value_flag:
                        for k, v in new_data.items():
                            if isinstance(v, dict):
                                final_data.update(v)
                    else:
                        final_data = {self.generic_name: new_data}
            if check_bucket_change:
                self.logger.debug("check_bucket_change flag: "+str(check_bucket_change)+ " for "+str(self.generic_name))
                new_bucket = self.time_to_bucket(datetime.datetime.now().timestamp())
                if new_bucket > bucket_requested:
                    # waiting for data may have crossed into a new bucket;
                    # re-fetch once for the current bucket without waiting
                    self.logger.debug("bucket changed from " + str(bucket_requested) +
                                      " to " + str(new_bucket) + " due to wait time for " + str(self.generic_name))
                    final_data, bucket_available, _ = self.get_bucket_aligned_data(new_bucket, steps, wait_for_data=False, check_bucket_change=False)
        else:
            self.logger.debug("End ofw in redis is True")

        if self.detachable and bucket_available:
            self.value_used_once = True
        return (final_data, bucket_available, self.last_time)

    def time_conversion(self, time):
        """Normalize a timestamp to unix seconds: values with more than 10
        integer digits (ms/us/ns epochs) are scaled down accordingly."""
        t = str(time)
        digits = len(t)
        if "." in t:
            digits = t.find(".")
        if digits > 10:
            new_t = time / (10 ** (digits - 10))
            return new_t
        else:
            return time

    def time_to_bucket(self, time):
        """Map a unix timestamp to its dT-bucket index within today;
        timestamps older than start_of_day wrap via modulo with a warning."""
        bucket = floor((time - self.start_of_day) / self.dT)
        if bucket > self.total_steps_in_day:
            bucket = self.total_steps_in_day
        elif bucket < 0:
            bucket = bucket%self.total_steps_in_day
            self.logger.warning("Received data is of older timestamp = "+str(time)+
                                " than start of today = "+str(self.start_of_day)+" for "+str(self.generic_name)+
                                ". set to bucket "+str(bucket)+" with total buckets "+str(self.total_steps_in_day))
        return bucket

    def get_current_bucket_data(self, steps, wait_for_data=True, check_bucket_change=True):
        """Convenience wrapper: bucket-aligned data starting at the bucket
        for the current wall-clock time."""
        bucket = self.time_to_bucket(datetime.datetime.now().timestamp())
        self.logger.debug("current b = "+str(bucket))
        return self.get_bucket_aligned_data(bucket, steps, wait_for_data, check_bucket_change)
class ThreadFactory:
    """Creates and controls the optimization controller thread plus its
    prediction / non-prediction helper threads for one registry id."""

    def __init__(self, model_name, control_frequency, horizon_in_steps,
                 dT_in_seconds, repetition, solver, id, optimization_type,
                 single_ev):
        self.logger = MessageLogger.get_logger(__name__, id)
        self.model_name = model_name
        self.control_frequency = control_frequency
        self.horizon_in_steps = horizon_in_steps
        self.dT_in_seconds = dT_in_seconds
        self.repetition = repetition
        self.solver = solver
        self.id = id
        self.optimization_type = optimization_type
        self.single_ev = single_ev
        self.redisDB = RedisDB()
        self.pyro_mip_server = None

    def getFilePath(self, dir, file_name):
        """Return the path of file_name under dir inside the container's
        /usr/src/app root."""
        # (removed an unused project_dir computation from the original)
        data_file = os.path.join("/usr/src/app", dir, file_name)
        return data_file

    def startOptControllerThread(self):
        """Read config and registries, spin up prediction threads and the
        optimization controller; returns 0 on success, 1 on failure."""
        self.logger.info("Creating optimization controller thread")
        self.logger.info("Number of repetitions: " + str(self.repetition))
        self.logger.info("Output with the following control_frequency: " +
                         str(self.control_frequency))
        self.logger.info(
            "Optimization calculated with the following horizon_in_steps: " +
            str(self.horizon_in_steps))
        self.logger.info(
            "Optimization calculated with the following dT_in_seconds: " +
            str(self.dT_in_seconds))
        self.logger.info("Optimization calculated with the following model: " +
                         self.model_name)
        self.logger.info(
            "Optimization calculated with the following solver: " +
            self.solver)
        self.logger.info(
            "Optimization calculated with the following optimization_type: " +
            self.optimization_type)

        self.redisDB.set("Error mqtt" + self.id, False)
        self.logger.debug("Error mqtt " +
                          str(self.redisDB.get("Error mqtt" + self.id)))

        # Creating an object of the configuration file (standard values)
        try:
            config = configparser.RawConfigParser()
            config.read(
                self.getFilePath("optimization/resources",
                                 "ConfigFile.properties"))
        except Exception as e:
            self.logger.error(e)

        # Loads the model name if it was not given through the endpoint command/start/id
        if not self.model_name:
            self.model_name = config.get("SolverSection", "model.name")
        self.logger.debug("This is the model name: " + self.model_name)
        self.model_path = os.path.join(
            config.get("SolverSection", "model.base.path"),
            self.model_name) + ".py"
        self.logger.debug("This is the path of the model: " +
                          str(self.model_path))

        # Loads the solver name if not specified in command/start/id
        if not self.solver:
            self.solver_name = config.get("SolverSection", "solver.name")
        else:
            self.solver_name = self.solver
        self.logger.debug(
            "Optimization calculated with the following solver: " +
            self.solver_name)

        ##############################################################################################
        output_config = None
        try:
            # Reads the registry/output and stores it into an object
            path = os.path.join(os.getcwd(), "optimization/resources",
                                str(self.id), "Output.registry.mqtt")
            if not os.path.exists(path):
                self.logger.debug(
                    "Output.registry.mqtt not set, only file output available")
            else:
                with open(path, "r") as file:
                    output_config = json.loads(file.read())
        except Exception as e:
            self.logger.error(
                "Output.registry.mqtt not set, only file output available")

        try:
            # Reads the registry/input and stores it into an object
            path = os.path.join(os.getcwd(), "optimization/resources",
                                str(self.id), "Input.registry.file")
            if not os.path.exists(path):
                input_config_file = {}
                self.logger.debug("Not Input.registry.file present")
            else:
                with open(path, "r") as file:
                    input_config_file = json.loads(file.read())
                self.logger.debug("Input.registry.file found")
        except Exception as e:
            self.logger.error("Input file not found")
            input_config_file = {}
            self.logger.error(e)

        try:
            # Reads the registry/input and stores it into an object
            path = os.path.join(os.getcwd(), "optimization/resources",
                                str(self.id), "Input.registry.mqtt")
            if not os.path.exists(path):
                input_config_mqtt = {}
                self.logger.debug("Not Input.registry.mqtt present")
            else:
                with open(path, "r") as file:
                    input_config_mqtt = json.loads(file.read())
                self.logger.debug("Input.registry.mqtt found")
        except Exception as e:
            self.logger.error("Input file not found")
            input_config_mqtt = {}
            self.logger.error(e)

        input_config_parser = InputConfigParser(input_config_file,
                                                input_config_mqtt,
                                                self.model_name, self.id,
                                                self.optimization_type)

        missing_keys = input_config_parser.check_keys_for_completeness()
        if len(missing_keys) > 0:
            raise MissingKeysException(
                "Data source for following keys not declared: " +
                str(missing_keys))

        self.prediction_threads = {}
        self.prediction_names = input_config_parser.get_prediction_names()
        if self.prediction_names is not None and len(
                self.prediction_names) > 0:
            for prediction_name in self.prediction_names:
                flag = input_config_parser.get_forecast_flag(prediction_name)
                if flag:
                    self.logger.info(
                        "Creating prediction controller thread for topic " +
                        str(prediction_name))
                    topic_param = input_config_parser.get_params(
                        prediction_name)
                    parameters = json.dumps({
                        "control_frequency": self.control_frequency,
                        "horizon_in_steps": self.horizon_in_steps,
                        "topic_param": topic_param,
                        "dT_in_seconds": self.dT_in_seconds
                    })
                    self.redisDB.set(
                        "train:" + self.id + ":" + prediction_name, parameters)
                    self.prediction_threads[prediction_name] = LoadPrediction(
                        config, self.control_frequency, self.horizon_in_steps,
                        prediction_name, topic_param, self.dT_in_seconds,
                        self.id, True)
                    # self.prediction_threads[prediction_name].start()

        self.non_prediction_threads = {}
        self.non_prediction_names = input_config_parser.get_non_prediction_names(
        )
        if self.non_prediction_names is not None and len(
                self.non_prediction_names) > 0:
            for non_prediction_name in self.non_prediction_names:
                flag = input_config_parser.get_forecast_flag(
                    non_prediction_name)
                if flag:
                    if non_prediction_name == "P_PV":
                        self.non_prediction_threads[
                            non_prediction_name] = PVPrediction(
                                config, input_config_parser, self.id,
                                self.control_frequency, self.horizon_in_steps,
                                self.dT_in_seconds, non_prediction_name)
                        self.non_prediction_threads[non_prediction_name].start(
                        )

        # Initializing constructor of the optimization controller thread
        if self.optimization_type == "MPC":
            self.opt = OptControllerMPC(
                self.id, self.solver_name, self.model_path,
                self.control_frequency, self.repetition, output_config,
                input_config_parser, config, self.horizon_in_steps,
                self.dT_in_seconds, self.optimization_type)
        elif self.optimization_type == "discrete":
            self.opt = OptControllerDiscrete(
                self.id, self.solver_name, self.model_path,
                self.control_frequency, self.repetition, output_config,
                input_config_parser, config, self.horizon_in_steps,
                self.dT_in_seconds, self.optimization_type)
        elif self.optimization_type == "stochastic":
            self.opt = OptControllerStochastic(
                self.id, self.solver_name, self.model_path,
                self.control_frequency, self.repetition, output_config,
                input_config_parser, config, self.horizon_in_steps,
                self.dT_in_seconds, self.optimization_type, self.single_ev)

        try:
            ####starts the optimization controller thread
            # BUG FIX: the original did `"False" in self.redisDB.get(...)`
            # directly, which raises TypeError when the key is absent
            # (get returns None); treat a missing flag as an error state.
            mqtt_flag = self.redisDB.get("Error mqtt" + self.id)
            self.logger.debug("Mqtt issue " + str(mqtt_flag))
            if mqtt_flag is not None and "False" in mqtt_flag:
                self.opt.start()
                self.logger.debug("Optimization object started")
                return 0
            else:
                self.redisDB.set("run:" + self.id, "stopping")
                self.stopOptControllerThread()
                self.redisDB.set("run:" + self.id, "stopped")
                self.logger.error("Optimization object could not be started")
                return 1
        except Exception as e:
            self.logger.error(e)
            return 1

    def stopOptControllerThread(self):
        """Stop prediction threads and the optimization controller; returns
        a status string, or the exception on failure."""
        try:
            # stop as per ID
            for name, obj in self.prediction_threads.items():
                self.redisDB.remove("train:" + self.id + ":" + name)
                obj.Stop()
            for name, obj in self.non_prediction_threads.items():
                obj.Stop()
                del obj
            self.logger.info("Stopping optimization controller thread")
            self.opt.Stop()
            del self.opt
            self.logger.info("Optimization controller thread stopped")
            del self.logger
            return "Optimization controller thread stopped"
        except Exception as e:
            self.logger.error(e)
            return e

    def is_running(self):
        """True while the optimization controller has not finished."""
        return not self.opt.get_finish_status()

    def update_training_params(self, key, parameters):
        """Periodically re-publish training parameters to redis (runs
        forever; intended for a background thread)."""
        while True:
            self.redisDB.set(key, parameters)
            # BUG FIX: the original called time.sleep("60") with a string,
            # which raises TypeError on the first iteration
            time.sleep(60)
def framework_start(id, startOFW):  # noqa: E501
    """Command for starting the framework

     # noqa: E501

    :param id: Id of the registry to be started
    :type id: str
    :param startOFW: Start command for the optimization framework   repetitions: -1 infinite repetitions
    :type startOFW: dict | bytes

    :rtype: None
    """
    available_solvers = ["ipopt", "glpk", "bonmin", "gurobi", "cbc"]
    available_optimizers = ["discrete", "stochastic", "MPC"]

    # Reject non-JSON payloads outright.
    if not connexion.request.is_json:
        logger.error("Wrong Content-Type")
        return "Wrong Content-Type", 400

    logger.info("Starting the system")
    startOFW = Start.from_dict(connexion.request.get_json())
    models = get_models()

    # Validate the requested model / solver / optimizer type.
    if startOFW.model_name != "" and startOFW.model_name not in models:
        return "Model not available. Available models are :" + str(models), 400
    if startOFW.solver not in available_solvers:
        return "Use one of the following solvers :" + str(available_solvers), 400
    if startOFW.optimization_type not in available_optimizers:
        return "Use one of the following optimizer types : " + str(available_optimizers), 400

    # The registry id must have a resources directory on disk.
    registry_dir = os.path.join(os.getcwd(), "optimization/resources", str(id))
    if not os.path.exists(registry_dir):
        return "Id not existing", 400

    redis_db = RedisDB()
    flag = redis_db.get("run:" + id)
    if flag is not None and flag == "running":
        # Already running: report it without changing anything (HTTP 200).
        return "System already running", 200

    try:
        if variable.start(id, startOFW) == 0:
            return "System started succesfully", 200
        return "System could not start", 400
    except (InvalidModelException, MissingKeysException,
            InvalidMQTTHostException) as e:
        logger.error("Error " + str(e))
        redis_db.set("run:" + id, "stopped")
        return str(e), 400
class CommandController:
    """Singleton coordinating one ThreadFactory per registry id.

    Tracks per-id optimization threads, mirrors their run state in redis
    under "run:<id>" keys, and persists metadata so instances can be
    restarted after a process restart.
    """
    _instance = None
    _lock = threading.Lock()

    def __new__(cls):
        # Double-checked locking so only one instance is ever created.
        if CommandController._instance is None:
            with CommandController._lock:
                if CommandController._instance is None:
                    CommandController._instance = super(
                        CommandController, cls).__new__(cls)
        return CommandController._instance

    def __init__(self):
        # BUG FIX: __new__ always returns the same object, but Python calls
        # __init__ on every CommandController() invocation, so each call used
        # to wipe the singleton's factory/running/statusThread state. The
        # guard makes initialization run exactly once.
        if getattr(self, "_initialized", False):
            return
        self.factory = {}        # id -> ThreadFactory
        self.statusThread = {}   # id -> watchdog thread (see run_status)
        self.running = {}        # id -> local running flag
        self.redisDB = RedisDB()
        self.lock_key = "id_lock"
        self._initialized = True

    def set(self, id, object):
        """Register the ThreadFactory `object` under `id`."""
        self.factory[id] = object

    def get_length_factory(self):
        """Return the number of registered ThreadFactory instances."""
        return len(self.factory)

    def get(self, id):
        """Return the ThreadFactory registered for `id` (KeyError if absent)."""
        return self.factory[id]

    def set_isRunning(self, id, bool):
        """Record the local running flag for `id`."""
        self.running[id] = bool

    def isRunningExists(self):
        """Return True if any id has ever had a running flag recorded."""
        logger.debug("IsRunning exists: " + str(len(self.running)))
        return len(self.running) > 0

    def get_isRunning(self, id):
        """Return the local running flag for `id`, defaulting to False."""
        if id in self.running.keys():
            return self.running[id]
        else:
            return False

    def get_running(self):
        """Return the full id -> running-flag map."""
        return self.running

    def get_statusThread(self, id):
        """Return the watchdog thread for `id` (KeyError if absent)."""
        return self.statusThread[id]

    def start(self, id, json_object, dict_object=None):
        """Create a ThreadFactory for `id` and start its optimization thread.

        Parameters come either from `json_object` (REST Start model) or, on
        restart, from the persisted metadata in `dict_object`.
        Returns 0 on success, 1 on failure.
        """
        logger.debug(str(json_object))
        if json_object is not None:
            self.model_name = json_object.model_name
            self.control_frequency = json_object.control_frequency
            self.horizon_in_steps = json_object.horizon_in_steps
            self.dT_in_seconds = json_object.d_t_in_seconds
            self.repetition = json_object.repetition
            self.solver = json_object.solver
            self.optimization_type = json_object.optimization_type
            self.single_ev = json_object.single_ev
        elif dict_object is not None:
            self.model_name = dict_object["model"]
            self.control_frequency = dict_object["control_frequency"]
            self.horizon_in_steps = dict_object["horizon_in_steps"]
            self.dT_in_seconds = dict_object["dT_in_seconds"]
            self.repetition = dict_object["repetition"]
            self.solver = dict_object["solver"]
            self.optimization_type = dict_object["optimization_type"]
            self.single_ev = dict_object["single_ev"]

        self.set(
            id,
            ThreadFactory(self.model_name, self.control_frequency,
                          self.horizon_in_steps, self.dT_in_seconds,
                          self.repetition, self.solver, id,
                          self.optimization_type, self.single_ev))

        logger.info("Thread: " + str(self.get(id)))
        self.redisDB.set("run:" + id, "starting")
        msg = self.get(id).startOptControllerThread()
        logger.debug("Answer from Thread factory" + str(msg))
        if msg == 0:
            self.set_isRunning(id, True)
            logger.debug("Flag isRunning set to True")
            # Watchdog thread that flips the redis state to "stopped" when the
            # optimization finishes or a stop is requested.
            self.statusThread[id] = threading.Thread(target=self.run_status,
                                                     args=(id, ))
            logger.debug("Status of the Thread started")
            self.statusThread[id].start()
            meta_data = {
                "id": id,
                "model": self.model_name,
                "control_frequency": self.control_frequency,
                "horizon_in_steps": self.horizon_in_steps,
                "dT_in_seconds": self.dT_in_seconds,
                "repetition": self.repetition,
                "solver": self.solver,
                "optimization_type": self.optimization_type,
                "single_ev": self.single_ev,
                # NOTE: "ztarttime" (sic) is the persisted key name and is read
                # back in get_status() — keep the spelling.
                "ztarttime": time.time()
            }
            self.redisDB.set("run:" + id, "running")
            IDStatusManager.persist_id(id, True, meta_data, self.redisDB)
            logger.info("running status " + str(self.running))
            logger.debug("Command controller start finished")
            return 0
        else:
            self.set_isRunning(id, False)
            logger.debug("Flag isRunning set to False")
            IDStatusManager.persist_id(id, False, None, self.redisDB)
            self.factory[id].stopOptControllerThread()
            self.redisDB.set("run:" + id, "stopped")
            logger.error("Command controller start could not be finished")
            return 1

    def stop(self, id):
        """Stop the threads for `id`, persist the stopped state, and clean up."""
        logger.debug("Stop signal received")
        logger.debug("This is the factory object: " + str(self.get(id)))
        if self.factory[id]:
            # Persist first so a crash during teardown still restarts cleanly.
            IDStatusManager.persist_id(id, False, None, self.redisDB)
            self.factory[id].stopOptControllerThread()
            del self.factory[id]
            del self.statusThread[id]
            self.set_isRunning(id, False)
            message = "System stopped succesfully"
            self.redisDB.set("run:" + id, "stopped")
            logger.debug(message)
            gc.collect()
        else:
            message = "No threads found"
            logger.debug(message)

    def run_status(self, id):
        """Watchdog loop: poll until the optimization finishes or a redis
        "stop" flag appears, then transition the id through stopping/stopped."""
        while True:
            status = self.get(id).is_running()
            flag = self.redisDB.get("run:" + id)
            if not status or (flag is not None and flag == "stop"):
                self.redisDB.set("run:" + id, "stopping")
                self.stop(id)
                break
            time.sleep(1)

    def restart_ids(self):
        """Restart previously-running ids and mark previously-stopped ids.

        Returns an error string if a restart raises a known exception;
        otherwise returns None.
        """
        old_ids, stopped_ids = IDStatusManager.instances_to_restart(
            self.redisDB)
        for s in old_ids:
            val = json.loads(s)
            try:
                self.start(val["id"], None, val)
            except (InvalidModelException, MissingKeysException,
                    InvalidMQTTHostException) as e:
                # TODO: should we catch these exceptions here?
                logger.error("Error " + str(e))
                self.redisDB.set("run:" + val["id"], "stopped")
                return str(e)
        for s in stopped_ids:
            val = json.loads(s)
            id = val["id"]
            self.redisDB.set("run:" + id, "stopped")
            self.redisDB.set(Constants.id_meta + ":" + id, json.dumps(val))

    def get_status(self):
        """Build a {id: {status, config, start_time}} snapshot from redis."""
        status = {}
        keys = self.redisDB.get_keys_for_pattern("run:*")
        if keys is not None:
            for key in keys:
                value = self.redisDB.get(key)
                id = key[4:]  # strip the "run:" prefix
                status[id] = {}
                if value is None or (value is not None and value == "stopped"):
                    status[id]["status"] = "stopped"
                elif value == "running":
                    status[id]["status"] = "running"
                elif value == "stop" or value == "stopping":
                    status[id]["status"] = "stopping"
                elif value == "starting":
                    status[id]["status"] = "starting"
        keys = self.redisDB.get_keys_for_pattern(Constants.id_meta + ":*")
        if keys is not None:
            for key in keys:
                value = self.redisDB.get(key)
                # FIX: derive the prefix length from the constant instead of the
                # hard-coded key[8:], which silently breaks if id_meta changes.
                id = key[len(Constants.id_meta) + 1:]
                if id not in status.keys():
                    status[id] = {}
                    status[id]["status"] = "stopped"
                status[id]["config"] = {}
                if value is not None:
                    status[id]["config"].update(json.loads(value))
                    if "ztarttime" in status[id]["config"].keys():
                        status[id]["start_time"] = status[id]["config"][
                            "ztarttime"]
                        status[id]["config"].pop("ztarttime")
                    if "model" in status[id]["config"].keys():
                        status[id]["config"]["model_name"] = status[id][
                            "config"]["model"]
                        status[id]["config"].pop("model")
        return status
class OutputController:
    """Publishes optimization results to MQTT brokers and mirrors them into
    redis under ``o:<id>:...`` keys.

    MQTT clients are pooled per broker: ``self.mqtt`` is keyed by
    ``"host:port"`` while ``self.mqtt_params`` is keyed by output name.
    """

    def __init__(self, id=None, output_config=None):
        self.logger = MessageLogger.get_logger(__name__, id)
        self.logger.info("Output Class started")
        self.output_config = output_config
        self.mqtt = {}          # "host:port" -> MQTTClient
        self.redisDB = RedisDB()
        self.mqtt_params = {}   # output key -> mqtt params (+ unit / horizon_values)
        self.output_mqtt = {}
        self.id = id
        # FIX: previously only created inside Stop(); initialize it here so it
        # always exists.
        self.stop_request = False
        self.config_parser_utils = ConfigParserUtils()
        self.logger.debug("output_config: " + str(self.output_config) + " " + str(type(self.output_config)))
        if self.output_config is not None:
            self.extract_mqtt_params()
            self.init_mqtt()

    def extract_mqtt_params(self):
        """Collect per-output MQTT parameters (plus unit/horizon flags) from
        the two-level output config into self.mqtt_params."""
        self.logger.debug("Output config = " + str(self.output_config))
        for key, value in self.output_config.items():
            self.logger.debug("key " + str(key) + " value " + str(value))
            for key2, value2 in value.items():
                self.logger.debug("key2 " + str(key2) + " value2 " + str(value2))
                mqtt = self.config_parser_utils.get_mqtt(value2)
                unit, horizon_values = self.read_extra_values(value2)
                if mqtt is not None:
                    self.mqtt_params[key2] = mqtt.copy()
                    self.mqtt_params[key2]["unit"] = unit
                    self.mqtt_params[key2]["horizon_values"] = horizon_values
        self.logger.debug("params = " + str(self.mqtt_params))

    def read_extra_values(self, value2):
        """Return (unit, horizon_values) from a config entry.

        Defaults to (None, False) when value2 is not a dict or lacks the keys.
        """
        unit = None
        horizon_values = False
        if isinstance(value2, dict):
            if "unit" in value2.keys():
                unit = value2["unit"]
            if "horizon_values" in value2.keys():
                horizon_values = value2["horizon_values"]
        return unit, horizon_values

    def init_mqtt(self):
        """Create one MQTTClient per distinct broker ("host:port").

        Sets the redis "Error mqtt<id>" flag to True on any failure so the
        controller can abort startup.
        """
        self.logger.debug("Starting init mqtt")
        self.redisDB.set("Error mqtt" + self.id, False)
        try:
            for key, value in self.mqtt_params.items():
                self.logger.debug("key " + str(key) + " value " + str(value))

                client_id = "client_publish" + str(randrange(100000)) + str(time.time()).replace(".", "")
                host = str(value["host"])
                port = value["mqtt.port"]
                self.logger.debug("client " + client_id)
                self.logger.debug("host " + host)
                self.logger.debug("port " + str(port))
                client_key = host+":"+str(port)
                # Reuse an existing client when several outputs share a broker.
                if client_key not in self.mqtt.keys():
                    self.mqtt[client_key] = MQTTClient(str(host), port, client_id,
                                            username=value["username"], password=value["password"],
                                            ca_cert_path=value["ca_cert_path"], set_insecure=value["insecure"], id=self.id)
            self.logger.info("successfully subscribed")
        except Exception as e:
            self.logger.debug("Exception while starting mqtt")
            self.redisDB.set("Error mqtt" + self.id, True)
            self.logger.error(e)

    def publish_data(self, id, data, dT):
        """Publish `data` as senml to the configured topics and mirror it to redis."""
        self.logger.debug("output data : "+ json.dumps(data, indent=4))
        current_time = int(time.time())
        try:
            senml_data = self.senml_message_format(data, current_time, dT)
            for mqtt_key, value in senml_data.items():
                v = json.dumps(value)
                if mqtt_key in self.mqtt_params.keys():
                    value2 = self.mqtt_params[mqtt_key]
                    topic = value2["topic"]
                    host = value2["host"]
                    port = value2["mqtt.port"]
                    qos = value2["qos"]
                    client_key = host + ":" + str(port)
                    self.mqtt[client_key].sendResults(topic, v, qos)
        except Exception as e:
            # BUG FIX: logger.error("msg ", e) has no %s placeholder, so the
            # exception was silently dropped from the log output.
            self.logger.error("error in publish data " + str(e))
        self.save_to_redis(id, data, current_time)

    def Stop(self):
        """Disconnect every MQTT client created by init_mqtt()."""
        self.stop_request = True

        try:
            # BUG FIX: self.mqtt is keyed by "host:port" (see init_mqtt), not by
            # the output names in self.mqtt_params; the old lookup
            # self.mqtt[key] always raised KeyError (swallowed below), so the
            # clients were never actually disconnected.
            for client_key, client in self.mqtt.items():
                self.logger.debug("disconnecting mqtt client " + str(client_key))
                client.MQTTExit()
            self.logger.info("OutputController safe exit")
        except Exception as e:
            self.logger.error(e)

    def senml_message_format(self, data, current_time, dT):
        """Convert raw output data into senml JSON documents keyed by output name.

        Outputs flagged with horizon_values emit every horizon step with
        timestamps spaced dT seconds apart; all others emit only the first value.
        """
        new_data = {}
        for key, value in data.items():
            flag = False
            ts = current_time  # renamed from `time`, which shadowed the module
            u = None
            base = None
            if isinstance(value, dict):
                bn, n, val = self.get_names(value)
            else:
                bn, n, val = None, None, value
            if bn:
                base = senml.SenMLMeasurement()
                base.name = bn
            if key in self.mqtt_params.keys():
                if self.mqtt_params[key]["unit"] is not None:
                    u = self.mqtt_params[key]["unit"]
                flag = self.mqtt_params[key]["horizon_values"]
            meas_list = []
            for v in val:
                meas = senml.SenMLMeasurement()
                meas.name = n
                meas.time = ts
                meas.value = v
                if u:
                    meas.unit = u
                meas_list.append(meas)
                ts += dT
                if not flag:
                    break  # only want the first value
            if len(meas_list) > 0:
                doc = senml.SenMLDocument(meas_list, base=base)
                new_data[key] = doc.to_json()
        return new_data

    def save_to_redis(self, id, data, time):
        """Replace the "o:<id>:*" keys in redis with the latest output values.

        NOTE: the `time` parameter shadows the time module inside this method;
        the name is kept for interface compatibility.
        """
        try:
            part_key = "o:" + id + ":"
            # Clear previous outputs for this id before writing the new ones.
            output_keys = self.redisDB.get_keys_for_pattern(part_key+"*")
            if output_keys is not None:
                for key in output_keys:
                    self.redisDB.remove(key)
            for key, value in data.items():
                key = key.replace("~","/")
                if isinstance(value, dict):
                    bn, n, val = self.get_names(value)
                else:
                    bn, n, val = None, key, value
                if bn:
                    n = bn + "/" + n
                index = 0
                for v in val:
                    k = part_key + n + ":" + str(index)
                    self.redisDB.set(k, json.dumps({str(time): v}))
                    index += 1
        except Exception as e:
            self.logger.error("error adding to redis " + str(e))

    def get_names(self, dict):
        """Return (bn, n, v) from a senml-like dict, None for missing keys.

        NOTE: the parameter shadows the builtin `dict`; the name is kept for
        interface compatibility.
        """
        bn = None
        n = None
        v = None
        if "bn" in dict.keys():
            bn = dict["bn"]
        if "n" in dict.keys():
            n = dict["n"]
        if "v" in dict.keys():
            v = dict["v"]
        return bn,n,v
Beispiel #17
0
class ControllerBase(ABC, threading.Thread):
    """Base thread for optimization controllers.

    Handles the per-id pyomo scratch directory, input/output controller
    setup, redis-backed stop/finish signalling, and the top-level run loop
    around the subclass-provided optimize() implementation.
    """

    def __init__(self, id, solver_name, model_path, control_frequency,
                 repetition, output_config, input_config_parser, config,
                 horizon_in_steps, dT_in_seconds, optimization_type):
        super().__init__()

        # Give pyomo a per-id scratch directory so parallel runs don't clash.
        pyomo_path = "/usr/src/app/logs/pyomo_" + str(id)
        if not os.path.exists(pyomo_path):
            os.makedirs(pyomo_path, mode=0o777, exist_ok=False)
            os.chmod(pyomo_path, 0o777)
        TempfileManager.tempdir = pyomo_path

        self.logger = MessageLogger.get_logger(__name__, id)
        self.logger.info("Initializing optimization controller " + id)
        self.id = id
        self.results = ""
        self.model_path = model_path
        self.solver_name = solver_name
        self.control_frequency = control_frequency
        self.repetition = repetition
        self.horizon_in_steps = horizon_in_steps
        self.dT_in_seconds = dT_in_seconds
        self.output_config = output_config
        self.input_config_parser = input_config_parser
        self.stopRequest = None  #threading.Event()
        self.redisDB = RedisDB()
        self.lock_key = "id_lock"
        self.optimization_type = optimization_type
        # Redis keys used to signal stop requests / completion across processes.
        self.stop_signal_key = "opt_stop_" + self.id
        self.finish_status_key = "finish_status_" + self.id
        self.redisDB.set(self.stop_signal_key, False)
        self.redisDB.set(self.finish_status_key, False)
        self.repetition_completed = False
        self.preprocess = False
        self.input = None
        self.output = None
        # Only build the I/O controllers while no mqtt error has been flagged.
        if "False" in self.redisDB.get("Error mqtt" + self.id):
            self.output = OutputController(self.id, self.output_config)
        if "False" in self.redisDB.get("Error mqtt" + self.id):
            self.input = InputController(self.id, self.input_config_parser,
                                         config, self.control_frequency,
                                         self.horizon_in_steps,
                                         self.dT_in_seconds)
        """try:
            # dynamic load of a class
            self.logger.info("This is the model path: " + self.model_path)
            module = self.path_import2(self.model_path)
            self.logger.info(getattr(module, 'Model'))
            self.my_class = getattr(module, 'Model')

        except Exception as e:
            self.logger.error(e)
            raise InvalidModelException("model is invalid/contains python syntax errors")"""

    # Importing a class dynamically
    def path_import2(self, absolute_path):
        """Load and execute the python module found at `absolute_path`.

        FIX: uses module_from_spec/exec_module; the previous
        spec.loader.load_module() call is deprecated and removed in
        Python 3.12.
        """
        spec = importlib.util.spec_from_file_location(absolute_path,
                                                      absolute_path)
        module = importlib.util.module_from_spec(spec)
        spec.loader.exec_module(module)
        return module

    def join(self, timeout=None):
        """Join the controller thread (stopRequest event is currently unused)."""
        #self.stopRequest.set()
        super(ControllerBase, self).join(timeout)

    def Stop(self):
        """Stop input/output controllers, raise the redis stop signal, and
        join the thread briefly if it is still alive."""
        try:
            if self.input:
                self.input.Stop()
                self.logger.debug("Deleting input instances")
        except Exception as e:
            self.logger.error("error stopping input " + str(e))
        try:
            if self.output:
                self.output.Stop()
                self.logger.debug("Deleting output instances")
        except Exception as e:
            self.logger.error("error stopping output " + str(e))

        self.logger.debug("setting stop_signal_key")
        self.redisDB.set(self.stop_signal_key, True)

        # BUG FIX: Thread.isAlive() was removed in Python 3.9; is_alive() is
        # the supported spelling (available since Python 2.6).
        if self.is_alive():
            self.join(1)

    def initialize_opt_solver(self):
        """Instantiate the pyomo solver named by self.solver_name."""
        start_time_total = time.time()

        self.optsolver = SolverFactory(
            self.solver_name
        )  #, tee=False, keepfiles=False, verbose=False, load_solutions=False)  # , solver_io="lp")
        self.logger.debug("Solver factory: " + str(self.optsolver))
        self.logger.info("solver instantiated with " + self.solver_name)

    def initialize_solver_manager(self):
        """Placeholder: the pyro solver manager is currently disabled."""
        self.solver_manager = None
        #self.solver_manager = SolverManagerFactory('pyro', host='localhost')
        self.logger.debug("Starting the solver_manager")

    def erase_pyomo_files(self, folder):
        """Delete every regular file in `folder` (pyomo temp files)."""
        for the_file in os.listdir(folder):
            file_path = os.path.join(folder, the_file)
            try:
                if os.path.isfile(file_path):
                    os.unlink(file_path)
                # elif os.path.isdir(file_path): shutil.rmtree(file_path)
            except Exception as e:
                self.logger.error(e)

    # Start the optimization process and gives back a result
    def run(self):
        """Thread entry point: run optimize(), then clean up and publish the
        finish status regardless of the outcome."""
        self.logger.info("Starting optimization controller")

        return_msg = "success"
        execution_error = False
        try:

            count = 0

            self.optimize(count, self.solver_name, self.model_path)

        except Exception as e:
            execution_error = True
            self.logger.error("error overall " + str(e))
            e = str(e)
            # Extract the offending solver name from pyomo's error message.
            solver_error = "The SolverFactory was unable to create the solver"
            if solver_error in e:
                i = e.index(solver_error)
                i_start = e.index("\"", i)
                i_end = e.index("\"", i_start + 1)
                solver = e[i_start + 1:i_end]
                return_msg = "Incorrect solver " + str(solver) + " used"
            else:
                return_msg = e
        finally:

            self.logger.info("repetition completed " +
                             str(self.repetition_completed))
            self.logger.info("stop request " +
                             str(self.redisDB.get_bool(self.stop_signal_key)))
            self.logger.info("execution error " + str(execution_error))
            # An exit that was neither requested, completed, nor errored means
            # the process was killed externally: escalate via kill_signal.
            if not self.redisDB.get_bool(
                    self.stop_signal_key
            ) and not self.repetition_completed and not execution_error:
                self.logger.error("Process interrupted")
                self.redisDB.set("kill_signal", True)

            # Erase this run's pyomo scratch folder.
            folder = "/usr/src/app/logs/pyomo_" + str(self.id)
            shutil.rmtree(folder, ignore_errors=True)

            # Disconnect all mqtt clients created by the output controller.
            if self.output:
                for key, object in self.output.mqtt.items():
                    object.MQTTExit()
                    self.logger.debug("Client " + key +
                                      " is being disconnected")

            self.logger.info(return_msg)
            self.redisDB.set(self.finish_status_key, True)
            return return_msg

    @abstractmethod
    def optimize(self, count, solver_name, model_path):
        """Subclass hook: perform the optimization until stopped."""
        while not self.redisDB.get_bool(self.stop_signal_key):
            pass

    def get_finish_status(self):
        """Return True once run() has completed (read from redis)."""
        return self.redisDB.get_bool(self.finish_status_key)
class ForecastPublisher(DataPublisher):

    def __init__(self, internal_topic_params, config, queue, publish_frequency, topic, id, horizon_in_steps,
                 dT_in_seconds):
        """Set up state for horizon publishing, then connect via DataPublisher.

        A failing superclass init flags "Error mqtt<id>" in redis so the
        controller can abort startup.
        """
        self.logger = MessageLogger.get_logger(__name__, id)
        self.redisDB = RedisDB()
        self.id = id
        self.topic = topic
        self.q = queue
        self.horizon_in_steps = horizon_in_steps
        self.dT_in_seconds = dT_in_seconds
        self.load_data = {}
        self.flag = True
        self.file_path = os.path.join("/usr/src/app", "optimization", "loadData.dat")
        try:
            super().__init__(True, internal_topic_params, config, publish_frequency, id)
        except Exception as e:
            self.redisDB.set("Error mqtt" + self.id, True)
            self.logger.error(e)

    def get_data(self):
        """Return the current horizon as a senml JSON string, or None when
        data flow is disabled or no load data is available yet."""
        try:
            # Respect the per-id data-flow switch in redis.
            if not self.redisDB.get_bool(Constants.get_data_flow_key(self.id)):
                return None
            # Drain one pending item from the queue, if any.
            if not self.q.empty():
                try:
                    fresh = self.q.get_nowait()
                    self.q.task_done()
                    self.load_data = fresh
                except Exception:
                    self.logger.debug("Queue empty")
            if not self.load_data:
                return None
            self.logger.debug("extract load data")
            payload = self.extract_horizon_data()
            self.logger.debug(str(payload))
            return payload
        except Exception as e:
            self.logger.error(str(e))
            return None

    def extract_horizon_data(self):
        """Build a senml JSON payload from the newest horizon of load data.

        Entries are the most recent ``horizon_in_steps`` (timestamp, value)
        pairs; negative values are clamped to zero.
        """
        # Sort by timestamp and keep only the newest horizon window.
        # (variable renamed from `list`, which shadowed the builtin)
        entries = sorted(self.load_data.items())[-self.horizon_in_steps:]
        meas = []
        # BUG FIX: iterate over the window we actually have. The original
        # indexed range(self.horizon_in_steps) and raised IndexError whenever
        # fewer samples than a full horizon were available.
        for timestamp, value in entries:
            if value < 0:
                value = 0
            meas.append(self.get_senml_meas(value, timestamp))
        doc = senml.SenMLDocument(meas)
        val = doc.to_json()
        return json.dumps(val)

    def get_senml_meas(self, value, time):
        """Wrap a value/timestamp pair in a senml measurement named by topic.

        `time` may be a float/int epoch or a datetime-like object exposing
        ``timestamp()``.
        """
        # BUG FIX: an int epoch previously fell into the .timestamp() branch
        # and raised AttributeError; accept any real number directly.
        if not isinstance(time, (int, float)):
            time = float(time.timestamp())
        else:
            time = float(time)
        meas = senml.SenMLMeasurement()
        meas.time = time
        meas.value = value
        meas.name = self.topic
        return meas