Example #1
__author__ = "***********"

import subprocess
import csv
import sys
import time
from kafka import KafkaConsumer, KafkaProducer

from Initializer import Initialize

init_object = Initialize()


class kafka_producer():
    def publish_message(self, producer_instance, topic_name, key, value):
        try:
            key_bytes = bytearray(key, 'utf8')
            value_bytes = bytearray(value, 'utf8')
            producer_instance.send(topic_name,
                                   key=key_bytes,
                                   value=value_bytes)
            producer_instance.flush()
            print('Message published successfully.')
        except Exception as ex:
            print('Exception in publishing message')
            print(str(ex))

    def connect_kafka_producer(self):
        _producer = None
        try:
            # Broker address and api_version below are assumed values for illustration.
            _producer = KafkaProducer(bootstrap_servers=['localhost:9092'],
                                      api_version=(0, 10))
        except Exception as ex:
            print('Exception while connecting Kafka')
            print(str(ex))
        return _producer
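
A minimal usage sketch (the topic name, key and value below are illustrative assumptions, not taken from the original project):

if __name__ == '__main__':
    kafka_obj = kafka_producer()
    producer = kafka_obj.connect_kafka_producer()
    if producer is not None:
        # Hypothetical topic and payload, for illustration only.
        kafka_obj.publish_message(producer, 'energy_readings',
                                  key='sensor_1', value='19160.0')
        producer.close()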
Example #2
    def tester_model(self, data_path, lag, horizon):
        # Function to check the efficiency of a model
        from sklearn.externals import joblib

        # numpy, pandas and Initialize are assumed to be module-level imports
        # in the original file; added here so the snippet is self-contained.
        import numpy as np
        import pandas as pd
        import tensorflow as tf

        from tensorflow.python.keras.models import model_from_json
        from Initializer import Initialize

        #energy_model_file_json = "model_lstm_energy2_v2_H30_colab.json"
        energy_model_file_json = "model_lstm_energy2_v3_H30_colab.json"
        #energy_model_file_json = "model_lstm_energy2_v5_H10_colab.json"
        # energy_model_file_json = "model_lstm_energy2_v1_H5_colab.json"
        #energy_model_file_h5 = "model_lstm_energy_v2_H30_colab.h5"
        energy_model_file_h5 = "model_lstm_energy2_v3_H30_colab.h5"
        #energy_model_file_h5 = "model_lstm_energy2_v5_H10_colab.h5"
        #energy_model_file_h5 = "model_lstm_energy2_v1_H5_colab.h5"

        #scalar_energy = joblib.load("./model/scaler_h30_robust_v3.save")
        scalar_energy = joblib.load("./model/scaler_h30_robust_v3.save")
        #scalar_energy = joblib.load("./model/scaler_standard_10.save")
        #scalar_energy = joblib.load("./model/scaler_h5_standard.save")

        graph = tf.get_default_graph()

        init_obj = Initialize()
        json_file_energy = open(init_obj.model_path + energy_model_file_json,
                                'r')
        loaded_model_energy_json = json_file_energy.read()
        json_file_energy.close()
        loaded_model_energy = model_from_json(loaded_model_energy_json)
        # load weights into new model
        loaded_model_energy.load_weights(init_obj.model_path +
                                         energy_model_file_h5)
        print("Loaded model from disk")

        adaptation_df = pd.read_csv(
            data_path, sep=",",
            index_col="timestamp")  # Read the processed data frame

        adaptation_df_series = adaptation_df.values
        main_energy_list = []

        forecast_list = []
        actual_list = []
        actual_main_list = []

        for i in range(0, len(adaptation_df)):
            energy_value = 0
            #for j in range(0, 22):
            list_val = adaptation_df_series[i, :]
            main_energy_list.append(list_val)
            sum_val = 0
            for index in range(0, len(list_val)):
                if index != 20:  # column 20 is excluded from the total, matching the skip list in the forecast sum below
                    sum_val = sum_val + list_val[index]
            actual_main_list.append(sum_val)

            if len(actual_main_list) == horizon:
                actual_list.append(sum(actual_main_list))
                actual_main_list = []

            if len(main_energy_list) == 20:
                #print (main_energy_list)
                predict_array = np.array(main_energy_list)
                # print (predict_array.shape)
                # Note: fit_transform re-fits the scaler on each window; the
                # inverse_transform below then uses these window-level parameters.
                predict_array = scalar_energy.fit_transform(predict_array)
                predict_array = predict_array.reshape(1, lag, 22)
                with graph.as_default():
                    energy_forecast = loaded_model_energy.predict(
                        predict_array)
                # K.clear_session()
                inverse_forecast = energy_forecast.reshape(horizon, 22)
                inverse_forecast = scalar_energy.inverse_transform(
                    inverse_forecast)
                inverse_forecast_features = inverse_forecast.reshape(
                    energy_forecast.shape[0], 22 * horizon)
                energy_forecast_total = 0
                for j in range(0, inverse_forecast_features.shape[1]):
                    # for j in range(0, 22*horizon): # Number of components * horizon equals inverse_forecast_Features.shape[1]
                    if j not in [
                            20, 42, 64, 86, 108, 130, 152, 174, 196, 218, 240,
                            262, 284, 306, 328, 350, 372, 394, 416, 438, 460,
                            482, 504, 526, 548, 570, 592, 614, 636, 658
                    ]:
                        energy_forecast_total = energy_forecast_total + inverse_forecast_features[
                            0, j]
                forecast_list.append(energy_forecast_total)
                #print (energy_forecast_total)
                #main_energy_list.pop()
                #main_energy_list.pop()
                #main_energy_list.pop()
                #main_energy_list.pop()
                #main_energy_list.pop()
                main_energy_list = []
        print(actual_list)
        print(forecast_list)
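
Since tester_model only prints the two lists, a short follow-up sketch (my addition; it assumes the entries line up one-to-one) that condenses them into a single error figure:

from math import sqrt
from sklearn.metrics import mean_squared_error

def report_rmse(actual_list, forecast_list):
    # Hypothetical helper: summarize forecast accuracy with RMSE.
    n = min(len(actual_list), len(forecast_list))  # guard against unequal lengths
    rmse = sqrt(mean_squared_error(actual_list[:n], forecast_list[:n]))
    print('Test RMSE: %.3f' % rmse)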
Example #3
    def __init__(self):
        self.dict_sensor_freq_keys = {
            "v1EnNorm": 20000,
            "v1EnCrit": 5000,
            "v1ExNorm": 20000,
            "v1ExCrit": 5000,
            "v2EnNorm": 20000,
            "v2EnCrit": 5000,
            "v2ExNorm": 20000,
            "v2ExCrit": 5000,
            "v3EnNorm": 20000,
            "v3EnCrit": 5000,
            "v3ExNorm": 20000,
            "v3ExCrit": 5000,
            "p1EnNorm": 60000,
            "p1EnCrit": 10000,
            "p1ExNorm": 30000,
            "p1ExCrit": 10000,
            "p2EnNorm": 60000,
            "p2EnCrit": 10000,
            "p2ExNorm": 30000,
            "p2ExCrit": 10000
        }
        self.sensor_id_key_map = {
            "S34": "p1En",
            "S33": "p1Ex",
            "S42": "p2En",
            "S41": "p2Ex",
            "S1": "v1En",
            "S2": "v1Ex",
            "S18": "v2En",
            "S20": "v2Ex",
            "S24": "v3En",
            "S25": "v3Ex"
        }

        self.sensor_mapping = {
            'S1': 'S1',
            'S11': 'S2',
            'S17': 'S3',
            'S18': 'S4',
            'S2': 'S5',
            'S20': 'S6',
            'S24': 'S7',
            'S25': 'S8',
            'S26': 'S9',
            'S33': 'S10',
            'S34': 'S11',
            'S35': 'S12',
            'S41': 'S13',
            'S42': 'S14',
            'S43': 'S15',
            'S46': 'S16',
            'S47': 'S17',
            'S48': 'S18',
            'S49': 'S19',
            'S50': 'S20',
            'S51': 'S21',
            'S7': 'S22'
        }
        self.reverse_sensor_map = {
            'S1': 'S1',
            'S2': 'S11',
            'S3': 'S17',
            'S4': 'S18',
            'S5': 'S2',
            'S6': 'S20',
            'S7': 'S24',
            'S8': 'S25',
            'S9': 'S26',
            'S10': 'S33',
            'S11': 'S34',
            'S12': 'S35',
            'S13': 'S41',
            'S14': 'S42',
            'S15': 'S43',
            'S16': 'S46',
            'S17': 'S47',
            'S18': 'S48',
            'S19': 'S49',
            'S20': 'S50',
            'S21': 'S51',
            'S22': 'S7'
        }
        self.init_obj = Initialize()
        self.sensor_id_list = []  # Integer values containing just the numeric ids of the sensors
        for key in self.sensor_id_key_map:
            sensor_id = int(self.sensor_mapping[key].split("S")[1])
            self.sensor_id_list.append(sensor_id)

        # Define the energy thresholds 1.45 and 1.35
        #self.high_power = 14.5
        #self.high_power = 22.05
        self.high_power = self.init_obj.energy_hp

        #self.base_power = 13.5
        self.base_power = self.init_obj.energy_bp

        # Define the reduction frequency values
        self.reduction_freq_normal_hp = 20000
        self.reduction_freq_critical_hp = 10000
        self.reduction_freq_normal_bp = 10000
        self.reduction_freq_critical_bp = 5000
        self.adapation_count = 0  # Keep a count of the total adaptations performed
        self.time_count = 0  # Keep track of the time elapsed
        self.bp_time = 0  # If a sensor has stayed in bp for 20 instances, reset this value and restore the old frequency
        self.bp_count = self.init_obj.bp_count
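
The three dictionaries compose: a raw sensor id such as "S34" maps to a short key ("p1En"), and appending "Norm" or "Crit" selects the configured frequency. A sketch of that lookup as a hypothetical companion method (the name and the critical flag are assumptions, not part of the original class):

    def lookup_frequency(self, sensor_id, critical=False):
        # Hypothetical helper: resolve a raw sensor id (e.g. "S34") to its
        # frequency by composing sensor_id_key_map with dict_sensor_freq_keys.
        key = self.sensor_id_key_map[sensor_id]          # "S34" -> "p1En"
        suffix = "Crit" if critical else "Norm"
        return self.dict_sensor_freq_keys[key + suffix]  # "p1EnNorm" -> 60000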
Example #4
from keras.models import Sequential
from keras.layers import Dense
from keras.layers import Flatten
from keras.layers import LSTM
from keras.layers.core import Activation, Dropout
from math import sqrt
import matplotlib
#matplotlib.use('Agg')
from matplotlib import pyplot
from datetime import datetime
from numpy import array
from pandas import read_csv
from sklearn.externals import joblib
import numpy as np
from Initializer import Initialize
import time

init_object = Initialize()


class LSTM_Learner():
    # Class to perform Model Creation and Learning
    # Consists of different functions for normalization and data arrangement

    # date-time parsing function for loading the dataset
    def parser(self, x):
        return datetime.strptime(x, '%Y-%m-%d %H:%M:%S')

    # Load the dataset into memory and start the learning
    def read_data(self, filepath):
        # Takes as input the path of the CSV describing the energy consumed

        aggregated_df = read_csv(filepath, sep=",",
                                 index_col="timestamp")  # arguments assumed, mirroring the frame layout in Example #2
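
The class comment mentions helpers for data arrangement; a minimal sketch of how the loaded frame could be windowed into LSTM input, matching the (samples, lag, 22) shape used in Example #2 (the function name and signature are my assumptions):

def series_to_supervised(values, lag, horizon):
    # Hypothetical helper: slide a window of `lag` timesteps over the series
    # and pair it with the next `horizon` timesteps as the target.
    X, y = [], []
    for i in range(len(values) - lag - horizon + 1):
        X.append(values[i:i + lag, :])
        y.append(values[i + lag:i + lag + horizon, :])
    return np.array(X), np.array(y)  # (samples, lag, features), (samples, horizon, features)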
Example #5
from Initializer import Initialize
from SendMessage import SendMessage
from SendMessage import LoadLatestData
import time

init = Initialize()
message = SendMessage()
message.sendMessage("Hello this is Bcrec NoticeBoard")
# message.sendMessage("Notice Board Creation Successful")
# message.sendMessage("Downloading All links")
# batchMessage = init.batchMessage()
# for item in batchMessage:
#     link = SendMessage().createLink(item)
#     message.sendMessage(link)
loadLatest = LoadLatestData()
while True:
    newFileNames = loadLatest.getLatestData()
    if newFileNames:
        for item in newFileNames:
            print(item)
            link = message.createLink(item) + "\n" + message.createDate()
            print(link)
            #message.sendMessage(link)
    time.sleep(900)
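
For readers without the project's SendMessage module, a stub of the interface this script relies on, inferred only from the calls above (the bodies are placeholders, not the real implementation):

class SendMessage:
    def sendMessage(self, text):
        # Placeholder: deliver `text` over the project's notification channel.
        print('SEND:', text)

    def createLink(self, item):
        # Placeholder: turn a downloaded file name into a shareable link string.
        return 'link-for-' + str(item)

    def createDate(self):
        # Placeholder: return a human-readable timestamp.
        import time
        return time.strftime('%Y-%m-%d %H:%M:%S')

class LoadLatestData:
    def getLatestData(self):
        # Placeholder: return a list of newly downloaded file names (empty if none).
        return []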
Example #6
from numpy import array
import pandas as pd
from datetime import datetime
from sklearn.externals import joblib

import tensorflow as tf

from tensorflow.python.keras.models import model_from_json
from tensorflow.python.keras import backend as K

from Adaptation_Planner import Adaptation_Planner

from Initializer import Initialize


init_obj = Initialize()
ada_obj = Adaptation_Planner()

import json

prev_vals = [19160.0, 19160.0, 19160.0, 19160.0, 19160.0, 19160.0, 19160.0, 19160.0, 19160.0, 19160.0, 19160.0,
             19160.0, 19160.0, 19160.0, 19160.0, 19160.0, 19160.0, 19160.0, 19160.0, 19160.0, 19160.0,
             19160.0]  # Initial energy configuration (one value per component)

#energy_model_file_json = "model_lstm_energy2_v2_ colab.json"
energy_model_file_json = init_obj.adaptation_model_json
#energy_model_file_h5 = "model_lstm_energy_v2_colab.h5"
#energy_model_file_h5 = "model_lstm_energy_v3_H10_colab.h5"
energy_model_file_h5 = init_obj.adaptation_model_h5
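
These file names feed the same load pattern shown in Example #2; for completeness, a sketch of how they are typically consumed (it mirrors the loading code in Example #2 rather than code from this snippet):

graph = tf.get_default_graph()

with open(init_obj.model_path + energy_model_file_json, 'r') as json_file_energy:
    loaded_model_energy = model_from_json(json_file_energy.read())
# Load the trained weights into the reconstructed architecture
loaded_model_energy.load_weights(init_obj.model_path + energy_model_file_h5)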