Example No. 1
def verify_data_format(data_dict, PARALLEL=False):
    # Verify there is no [] or N/A in the list
    # Only float or int values are allowed
    log.info('Checking for any inconsistent data format...')
    log.info('-' * 40)

    list_of_wrong_data_format = list()
    time_slots = data_dict['time_slots']
    weather_list_used = [data_dict['weather_list'][i] for i in [1, 2, 3, 10, 11]]
    key_list = weather_list_used + data_dict['sensor_list']

    if not PARALLEL:
        for key in key_list:
            log.info('checking ' + str(key) + '...')
            for i, samples in enumerate(data_dict[key][1]):
                for j, each_sample in enumerate(samples):

                    # Flag empty lists and any non-numeric sample
                    if each_sample == [] or not isinstance(each_sample, (int, float)):
                        list_of_wrong_data_format.append([key, i, j])
                        log.info(str(each_sample) + ' at ' + str(time_slots[i]) + ' in ' + str(key))

        log.info('-' * 40)

    # PARALLEL
    else:
        manager = mp.Manager()
        q = manager.Queue()

        p = mp.Pool(CPU_CORE_NUM)
        param_list = [(key, data_dict[key][1], time_slots, q) for key in key_list]

        p.map(pp_verify_sensor_data_format, param_list)

        p.close()
        p.join()

        while not q.empty():
            item = q.get()
            log.warn('queue item: ' + str(item))
            list_of_wrong_data_format.append(item)
    
    if len(list_of_wrong_data_format) > 0:
        log.critical('Inconsistent data format in the list of data_used')
        raise NameError('Inconsistent data format in the list of data_used')

    return list_of_wrong_data_format
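
A minimal usage sketch, assuming the data_dict layout inferred from the code above: a 'time_slots' list, a 'weather_list' of at least twelve column names (indices 1, 2, 3, 10, 11 are checked), a 'sensor_list', and a [stime_read, sensor_read, [utc_t, val]] triple per key. The names and values here are made up.

import datetime as dt

# Two 15-minute slots; data_dict[key][1] is the per-slot list of samples
# that verify_data_format() walks.
time_slots = [dt.datetime(2014, 1, 1, 0, 0), dt.datetime(2014, 1, 1, 0, 15)]
weather_list = ['w%d' % i for i in range(12)]
data_dict = {'time_slots': time_slots,
             'weather_list': weather_list,
             'sensor_list': ['POWER_1']}

for key in [weather_list[i] for i in [1, 2, 3, 10, 11]] + ['POWER_1']:
    data_dict[key] = [[[0.0], [1.0]],    # stime_read: seconds into each slot
                      [[20.5], [21.0]],  # sensor_read: numeric samples only
                      [[], []]]          # [utc_t, val]

# With clean data this returns []; any [] or non-numeric sample raises NameError.
bad = verify_data_format(data_dict, PARALLEL=False)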
Example No. 2
def read_sensor_data(sensor_hash, start_time, end_time):

    from log_util import log

    sensor_data = dict()
    for stitle, uid in sensor_hash.iteritems():
        tsvals = read_quasar_url(uid, start_time, end_time)

        if tsvals is None or len(tsvals) == 0:
            log.critical(stitle + " (" + uid + ") is unavailable from " + str(start_time) + " to " + str(end_time))
        else:
            log.debug(uid + " : " + stitle + " : TS-VAL size " + str(len(tsvals)))

            """
            log.info(uid + " : " + stitle + " : TS-VAL reading saved in JSON format...")
            with open(JSON_DIR + "reading-" + uid + ".json", 'w') as f:
                f.write(simplejson.dumps(tsvals))
            """

            sensor_data.update({stitle: tsvals})

    return sensor_data
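
A minimal sketch of calling this reader; the sensor title and UID are placeholders, and only the unix-epoch bounds matter to read_quasar_url.

import time

# Hypothetical map of display title -> timeseries UID (values made up).
sensor_hash = {'SODA1R300_ART': 'uid-0001'}

end_time = int(time.time())
start_time = end_time - 7 * 24 * 3600   # one week back, as unix epochs

sensor_data = read_sensor_data(sensor_hash, start_time, end_time)
# sensor_data maps each available title to its time-series values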
Example No. 3
def read_sensor_data(sensor_hash, start_time, end_time):

    from log_util import log
    client = InfluxDBClient('ddea-tsdb', 8086, 'ddea', 'ddea', 'ddea')  # host, port, user, password, database
    sensor_data = dict()
    for stitle, uid in sensor_hash.iteritems():
        tsvals = read_influx_url(client, uid, start_time, end_time)

        if tsvals is None or len(tsvals) == 0:
            log.critical(stitle + " (" + uid + ") is unavailable from " + str(start_time) + " to " + str(end_time))
        else:
            log.debug(uid + " : " + stitle + " : TS-VAL size " + str(len(tsvals)))

            """
            log.info(uid + " : " + stitle + " : TS-VAL reading saved in JSON format...")
            with open(JSON_DIR + "reading-" + uid + ".json", 'w') as f:
                f.write(simplejson.dumps(tsvals))
            """

            sensor_data.update({stitle: tsvals})

    return sensor_data
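
This variant swaps the quasar reader for InfluxDB, but read_influx_url itself is not shown in the listing. A minimal sketch of what it might look like with the influxdb-python client, returning [timestamp, value] rows like the quasar reader; the InfluxQL query shape and return format are assumptions.

def read_influx_url(client, uid, start_time, end_time):
    # Hypothetical reconstruction: select one series between two unix epochs
    # and return [[epoch_seconds, value], ...].
    q = ('SELECT value FROM "%s" WHERE time >= %ds AND time < %ds'
         % (uid, start_time, end_time))
    result = client.query(q, epoch='s')
    return [[p['time'], p['value']] for p in result.get_points()]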
Example No. 4
def ddea_process(sensor_names_hash, sensor_data, start_time, end_time, timelet_inv, bldg_key, pname_key, plot_analysis=False):
    #----------------------------- DATA PRE-PROCESSING -------------------------
    from log_util import log

    log.info('#' * 80)
    log.info('#  Data Pre-Processing')
    log.info('#' * 80)

    ans_start_t = dt.datetime.fromtimestamp(start_time)
    ans_end_t = dt.datetime.fromtimestamp(end_time)

    data_dict, purge_list = \
            construct_data_dict(sensor_data, ans_start_t, ans_end_t, timelet_inv, PARALLEL=IS_USING_PARALLEL_OPT)

    # Verify the data format before running the summarization.
    log.info('-' * 40)
    log.info('VERIFY DATA FORMAT...')
    log.info('-' * 40)

    # This is for data verification purposes.
    # You can skip it if you are sure that there is no bug in the 'construct_data_dict' function.
    list_of_wrong_data_format = \
        verify_data_format(data_dict, PARALLEL=IS_USING_PARALLEL_OPT)

    if len(list_of_wrong_data_format) > 0:
        log.critical('Measurement list below')
        log.critical('-' * 40)
        log.critical(str(list_of_wrong_data_format))
        raise NameError('Errors in data format')

    if SAVE_PROC_BIN:
        # Save summarized Data in Bin Format
        log.info("Saving data_dict in bin format...")
        mt.saveObjectBinaryFast(data_dict, PROC_OUT_DIR + '/' + bldg_key.lower() + '_data_dict.bin')

    #----------------------------- DATA SUMMARIZATION --------------------------
    # This performs the data summarization process.
    log.info('#' * 80)
    log.info('DATA SUMMARIZATION...')
    log.info('#' * 80)

    # Compute Average Feature if PROC_AVG == True
    # Compute Differential Feature if PROC_DIFF == True

    bldg_load_out = data_summerization(bldg_key, data_dict, PARALLEL=IS_USING_PARALLEL_OPT)

    if SAVE_PROC_BIN:
        # Save summarized Data in Bin Format
        log.info("Saving summarized building data in bin format...")
        mt.saveObjectBinaryFast(bldg_load_out, PROC_OUT_DIR + bldg_key.lower() + '_ds_out.bin')

    # Export Summarized Data to JSON
    feat_avg_exist = 'avgdata_dict' in bldg_load_out
    feat_diff_exist = 'diffdata_dict' in bldg_load_out

    if feat_avg_exist and feat_diff_exist:
        log.info("Saving summarized building data in JSON format...")
        save_processed_json(sensor_names_hash, bldg_load_out)

    if feat_avg_exist:
        save_avg_data_summary_json(bldg_key, sensor_names_hash, bldg_load_out['avgdata_dict'])

    if feat_diff_exist:
        save_diff_data_summary_json(bldg_key, sensor_names_hash, bldg_load_out['diffdata_dict'])

    #------------------------------- MODEL DISCOVERY ---------------------------
    log.info('#' * 80)
    log.info('MODEL DISCOVERY...')
    log.info('#' * 80)

    log.info('Building for ' + bldg_key + '...')

    ## CREATE BUILDING OBJECT ##
    bldg = pbp.create_bldg_object(bldg_load_out, bldg_key, pname_key, PARALLEL=IS_USING_PARALLEL_OPT)

    ## BAYESIAN NETWORK PROBABILITY ANALYSIS OBJECT ##
    if feat_avg_exist:
        avg = pbp.bn_probability_analysis(bldg, sig_tag='avg')
        bldg.anal_out.update({'avg': avg})

    if feat_diff_exist:
        diff = pbp.bn_probability_analysis(bldg, sig_tag='diff')
        bldg.anal_out.update({'diff': diff})

    if SAVE_PROC_BIN:
        # Save the building object in bin format
        log.info("Saving building graph in bin format...")
        mt.saveObjectBinaryFast(bldg, PROC_OUT_DIR + bldg_key.lower() + '_bldg_out.bin')

    # Export a building graph in json format
    log.info("Saving building graph in JSON format...")
    all_labels, all_edges = conv_bn_graph_json(bldg)
    save_bn_graph_json(bldg_key, sensor_names_hash, all_labels, all_edges)

    if plot_analysis:
        log.info('#' * 80)
        log.info('ANALYTICS PLOTTING...')
        log.info('#' * 80)

        # Analysis of BN network results - all results will be saved in fig_dir.
        pbp.plotting_bldg_lh(bldg, attr='sensor', num_picks=30)
        pbp.plotting_bldg_lh(bldg, attr='time', num_picks=30)
        pbp.plotting_bldg_lh(bldg, attr='weather', num_picks=30)

        pbp.plotting_bldg_bn(bldg)
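
A minimal sketch of driving this pipeline directly, mirroring the worker thread in Example No. 7; the window, interval, and sensor selection are made up, and mt.sensor_name_uid_dict / read_sensor_data come from the other examples in this listing.

import time
import datetime as dt

# Hypothetical one-week window at 15-minute slots; the sensor map is made up.
sensor_hash = {'SODA1R300_ART': 'uid-0001'}
s_epoch = int(time.mktime(dt.datetime(2014, 1, 1).timetuple()))
e_epoch = int(time.mktime(dt.datetime(2014, 1, 8).timetuple()))
time_inv = dt.timedelta(minutes=15)

sensor_names_hash = mt.sensor_name_uid_dict('SODA', sensor_hash)
sensor_data = read_sensor_data(sensor_names_hash, s_epoch, e_epoch)
if sensor_data:
    ddea_process(sensor_names_hash, sensor_data, s_epoch, e_epoch,
                 time_inv, 'SODA', 'POWER', plot_analysis=True)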
Example No. 5
def get_weather_timelet(data_dict, t_slots, timelet_inv, use_weather_data_bin=True):

    log.info('------------------------------------')
    log.info('Retrieving weather data... ')
    log.info('------------------------------------')
    t_start = t_slots[0]
    t_end = t_slots[-1]
    log.info('start time: ' + str(t_start) + ' ~ end time: ' + str(t_end))

    # Date iteration given start time and end-time
    # Iterate for each day for all weather data types
    for date_idx, date in enumerate(daterange(t_start, t_end, inclusive=True)):
        log.info("weather date : " + date.strftime("%Y-%m-%d"))

        temp = date.strftime("%Y,%m,%d").rsplit(',')

        if use_weather_data_bin:
            filename = WEATHER_DIR + "%04d_%02d_%02d.bin"%(int(temp[0]), int(temp[1]), int(temp[2]))
            data_day = mt.loadObjectBinaryFast(filename)
        else:
            data_day = rw.retrieve_data('SDH', int(temp[0]), int(temp[1]), int(temp[2]), view='d')

        # split the day's data into lines
        data_day = data_day.split('\n')

        # Iterate over each time index (h_idx) of a day for all weather data types
        for h_idx, hour_sample in enumerate(data_day):

            hour_samples = hour_sample.split(',')

            # Initialize weather data lists of dictionary
            # The first row is always the list of weather data types
            if (h_idx == 0) and (date_idx == 0):

                sensor_name_list = hour_sample.split(',')
                sensor_name_list = [sensor_name.replace('/', '-') for sensor_name in sensor_name_list]

                for sample_idx, each_sample in enumerate(hour_samples):
                    sensor_name = sensor_name_list[sample_idx]
                    sensor_read = [[] for i in range(len(t_slots))]
                    stime_read = [[] for i in range(len(t_slots))]  # Create the list of lists for minute index
                    utc_t = []
                    val = []
                    #data_dict.update({sensor_name:sensor_read})
                    #data_dict.update({sensor_name:zip(mtime_read,sensor_read)})
                    data_dict.update({sensor_name: [stime_read, sensor_read, [utc_t, val]]})

            elif h_idx > 0:
                ################################################################
                # 'DateUTC' is the timestamp column
                sample_DateUTC = hour_samples[sensor_name_list.index('DateUTC')]

                # convert UTC time to VTT local time
                utc_dt = dt.datetime.strptime(sample_DateUTC, "%Y-%m-%d %H:%M:%S")
                vtt_dt_aware = utc_dt.replace(tzinfo=from_zone).astimezone(to_zone)

                # convert to offset-naive from offset-aware datetimes
                vtt_dt = dt.datetime(*(vtt_dt_aware.timetuple()[:6]))

                ### WARNING: vtt_utc is not utc
                #log.warn("vtt_utc is not utc")
                vtt_utc = dtime_to_unix([vtt_dt])

                # Check boundary condition
                if int((vtt_dt - t_slots[0]).total_seconds()) < 0 or int((vtt_dt - t_slots[-1]).total_seconds()) >= timelet_inv.seconds:
                    log.debug('skipping weather data out of analysis range...')
                    continue

                slot_idx = int((vtt_dt - t_slots[0]).total_seconds() / timelet_inv.seconds)
                cur_sec_val = (vtt_dt - t_slots[slot_idx]).total_seconds()

                if cur_sec_val >= timelet_inv.seconds:
                    log.critical('sec: ' + str(cur_sec_val))
                    raise NameError('Seconds from an hour idx cannot be greater than ' + str(timelet_inv.seconds) + ' secs')

                # time slot index for a given weather sample time
                try:

                    for sample_idx, each_sample in enumerate(hour_samples):

                        # convert string to float if possible
                        try:
                            each_sample = float(each_sample)
                        except ValueError:
                            pass

                        sensor_name = sensor_name_list[sample_idx]

                        if sensor_name in data_dict:
                            if each_sample != 'N/A' and each_sample != []:
                                #data_dict[sensor_name][vtt_dt_idx].append(each_sample)
                                data_dict[sensor_name][0][slot_idx].append(cur_sec_val)
                                data_dict[sensor_name][1][slot_idx].append(each_sample)
                                data_dict[sensor_name][2][0].append(vtt_utc)
                                data_dict[sensor_name][2][1].append(each_sample)

                        else:
                            raise NameError('Inconsistency in the list of weather data')

                except ValueError:
                    slot_idx = -1

            # hour_sample is the header row of weather field names; discard it
            else:

                hour_sample = list()

    return sensor_name_list
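
A minimal sketch of driving this function: the caller pre-builds the slot grid and passes a dict for the function to fill in place. The date and slot width are made up, and it assumes pre-fetched weather bins exist under WEATHER_DIR for that day.

import datetime as dt

timelet_inv = dt.timedelta(minutes=15)
start = dt.datetime(2014, 1, 1)
t_slots = [start + i * timelet_inv for i in range(96)]  # one day of slots

data_dict = {}
weather_names = get_weather_timelet(data_dict, t_slots, timelet_inv,
                                    use_weather_data_bin=True)
# Each weather column ends up as
# data_dict[name] == [stime_read, sensor_read, [utc_t, val]]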
Example No. 6
def get_val_timelet(reading, t_slots, ans_start_t, ans_end_t, timelet_inv):

    data = dict()
    data['value'] = np.array([r[1] for r in reading], dtype=float)

    ts_list = list()
    for r in reading:
        local_dt = dt.datetime.fromtimestamp(r[0])
        time_tup = local_dt.timetuple()
        ts_list.append([local_dt, time_tup[5], time_tup[4], time_tup[3], time_tup[6], time_tup[2], time_tup[1]])

    data['ts'] = np.array(ts_list)

    # Bail out when the reading produced no values at all
    if not len(data['value']):
        log.critical('Error in file reading: empty data. Skip and need to be purged from sensor list')
        return -1, -1, -1, -1

    if (len(data["ts"]) < MIN_NUM_VAL_FOR_FLOAT) or (len(data["value"]) < MIN_NUM_VAL_FOR_FLOAT):
        log.critical('No data included ' + str(data) + '... Skip and need to be purged from sensor list')
        return -1, -1, -1, -1

    nan_idx_list = np.nonzero(np.isnan(data["value"]))[0]
    sensor_val = np.delete(data["value"], nan_idx_list, axis=0)
    time_val = np.delete(data["ts"], nan_idx_list, axis=0)

    # Create the list of lists for value
    sensor_read = [[] for i in range(len(t_slots))]

    # Create the list of lists for seconds index
    stime_read = [[] for i in range(len(t_slots))]

    utc_t = []
    val = []

    for t_sample, v_sample in zip(time_val, sensor_val):
        temp_dt = t_sample[DT_IDX]

        if temp_dt < ans_start_t or temp_dt >= ans_end_t:
            continue

        try:
            idx = int((temp_dt - ans_start_t).total_seconds() / timelet_inv.seconds)
            sensor_read[idx].append(v_sample)
            #secs=t_sample[MIN_IDX]*MIN+t_sample[SEC_IDX]
            secs = (temp_dt - t_slots[idx]).total_seconds()
            if secs >= timelet_inv.seconds:
                log.info('sec: ' + str(secs))
                raise NameError('Seconds from an hour idx cannot be greater than ' + str(timelet_inv.seconds) + ' secs')

            stime_read[idx].append(secs)

        except ValueError:
            idx = -1

        utc_temp = dtime_to_unix([t_sample[DT_IDX]])
        utc_t.append(utc_temp)
        val.append(v_sample)

    return sensor_read, stime_read, utc_t, val
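
A minimal sketch of binning one synthetic reading inside the same module; the [unix_epoch, value] row shape matches what the TSDB readers above return, and the five-minute sample spacing is made up.

import time
import datetime as dt

timelet_inv = dt.timedelta(minutes=15)
ans_start_t = dt.datetime(2014, 1, 1)
ans_end_t = dt.datetime(2014, 1, 2)
t_slots = [ans_start_t + i * timelet_inv for i in range(96)]  # 96 slots = 24 h

# One synthetic sample every 5 minutes across the day.
reading = [[int(time.mktime((ans_start_t + dt.timedelta(minutes=m)).timetuple())),
            20.0 + 0.01 * m] for m in range(0, 1440, 5)]

sensor_read, stime_read, utc_t, val = get_val_timelet(
    reading, t_slots, ans_start_t, ans_end_t, timelet_inv)
# sensor_read[i] holds the raw values that fell into slot i;
# stime_read[i] holds each value's offset in seconds within that slot.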
Example No. 7
    def run(self):

        from log_util import log

        try:
            while True:
                cmd = None
                try:
                    cmd = self.cmd_q.get(block=True, timeout=0.1)
                except Exception as e:
                    continue

                finally:
                    if cmd:
                        self.cmd_q.task_done()

                        try:

                            with open(META_DIR + "wip.json", 'w') as f:
                                f.write(simplejson.dumps({"wip": 1}))

                            cmdset = simplejson.loads(cmd)
                            sensor_hash = cmdset['selected-nodes']
                            s_date = datetime.strptime(cmdset['start-date'], '%Y-%m-%d')
                            e_date = datetime.strptime(cmdset['end-date'], '%Y-%m-%d')

                            if not len(sensor_hash):
                                log.critical("No sensor is selected!")
                            else:

                                log.info('****************************** Beginning of DDEA *******************************')

                                bldg_key = 'SODA'
                                # exemplar power-sensor name key chosen by the user
                                #pname_key = '_POWER_'
                                pname_key = 'POWER'

                                s_epoch = int(time.mktime(s_date.timetuple()))
                                e_epoch = int(time.mktime(e_date.timetuple()))
                                time_inv = dt.timedelta(seconds=cmdset['time-interval'])

                                log.info("Cleaning up old output...")

                                mt.remove_all_files(FIG_DIR)
                                mt.remove_all_files(JSON_DIR)
                                mt.remove_all_files(PROC_OUT_DIR)

                                log.info("start epoch : " + str(s_epoch) + " end epoch : " + str(e_epoch))
                                log.info(str(time_inv) + ' time slot interval is set for this data set !!!')
                                log.info("BLDG_KEY : " + bldg_key + " PNAME_KEY : " + pname_key)
                                log.info('*' * 80)

                                log.info("Retrieve sensor data from quasar TSDB")

                                sensor_names_hash = mt.sensor_name_uid_dict(bldg_key, sensor_hash)

                                sensor_data = read_sensor_data(sensor_names_hash, s_epoch, e_epoch)

                                if sensor_data and len(sensor_data):
                                    ddea_process(sensor_names_hash, sensor_data, s_epoch, e_epoch, time_inv, bldg_key, pname_key)
                                else:
                                    log.critical("No sensor data available for time period and sensor selected!")

                                log.info('******************************** End of DDEA **********************************')

                            os.remove(META_DIR + "wip.json")
                            cmd_lock.clear()

                            log.info("execution-lock cleared")
                            log.info('~' * 80)

                        except Exception as e:
                            os.remove(META_DIR + "wip.json")
                            cmd_lock.clear()
                            print e
                            log.error(str(e))

        except Exception as e:
            os.remove(META_DIR + "wip.json")
            cmd_lock.clear()
            print e
            log.error(str(e))

        finally:
            sys.exit(0)
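
A minimal sketch of feeding this worker one command: the field names are exactly the keys run() reads from cmdset, while the sensor UID and the worker handle are placeholders.

import simplejson

# Hypothetical command payload; 'time-interval' is in seconds.
cmd = simplejson.dumps({
    'selected-nodes': {'SODA1R300_ART': 'uid-0001'},
    'start-date': '2014-01-01',
    'end-date': '2014-01-08',
    'time-interval': 900,
})
worker.cmd_q.put(cmd)   # assuming `worker` is the running thread object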
Example No. 8
def get_weather_timelet(data_dict,
                        t_slots,
                        timelet_inv,
                        use_weather_data_bin=True):

    log.info('------------------------------------')
    log.info('Retrieving weather data... ')
    log.info('------------------------------------')
    t_start = t_slots[0]
    t_end = t_slots[-1]
    log.info('start time: ' + str(t_start) + ' ~ end time: ' + str(t_end))

    # Date iteration given start time and end-time
    # Iterate for each day for all weather data types
    for date_idx, date in enumerate(daterange(t_start, t_end, inclusive=True)):
        log.info("weather date : " + date.strftime("%Y-%m-%d"))

        temp = date.strftime("%Y,%m,%d").rsplit(',')

        if use_weather_data_bin:
            filename = WEATHER_DIR + "%04d_%02d_%02d.bin" % (
                int(temp[0]), int(temp[1]), int(temp[2]))
            data_day = mt.loadObjectBinaryFast(filename)
        else:
            data_day = rw.retrieve_data('SDH',
                                        int(temp[0]),
                                        int(temp[1]),
                                        int(temp[2]),
                                        view='d')

        # split the day's data into lines
        data_day = data_day.split('\n')

        # Iterate over each time index (h_idx) of a day for all weather data types
        for h_idx, hour_sample in enumerate(data_day):

            hour_samples = hour_sample.split(',')

            # Initialize weather data lists of dictionary
            # The first row is always the list of weather data types
            if (h_idx == 0) and (date_idx == 0):

                sensor_name_list = hour_sample.split(',')
                sensor_name_list = [
                    sensor_name.replace('/', '-')
                    for sensor_name in sensor_name_list
                ]

                for sample_idx, each_sample in enumerate(hour_samples):
                    sensor_name = sensor_name_list[sample_idx]
                    sensor_read = [[] for i in range(len(t_slots))]
                    stime_read = [[] for i in range(len(t_slots))]  # Create the list of lists for minute index
                    utc_t = []
                    val = []
                    #data_dict.update({sensor_name:sensor_read})
                    #data_dict.update({sensor_name:zip(mtime_read,sensor_read)})
                    data_dict.update(
                        {sensor_name: [stime_read, sensor_read, [utc_t, val]]})

            elif h_idx > 0:
                ################################################################
                # 'DateUTC' is the timestamp column
                sample_DateUTC = hour_samples[sensor_name_list.index('DateUTC')]

                # convert UTC time to VTT local time
                utc_dt = dt.datetime.strptime(sample_DateUTC,
                                              "%Y-%m-%d %H:%M:%S")
                vtt_dt_aware = utc_dt.replace(
                    tzinfo=from_zone).astimezone(to_zone)

                # convert to offset-naive from offset-aware datetimes
                vtt_dt = dt.datetime(*(vtt_dt_aware.timetuple()[:6]))

                ### WARNING: vtt_utc is not utc
                #log.warn("vtt_utc is not utc")
                vtt_utc = dtime_to_unix([vtt_dt])

                # Check boundary condition
                if (int((vtt_dt - t_slots[0]).total_seconds()) < 0 or
                        int((vtt_dt - t_slots[-1]).total_seconds()) >= timelet_inv.seconds):
                    log.debug('skipping weather data out of analysis range...')
                    continue

                slot_idx = int((vtt_dt - t_slots[0]).total_seconds() /
                               timelet_inv.seconds)
                cur_sec_val = (vtt_dt - t_slots[slot_idx]).total_seconds()

                if cur_sec_val >= timelet_inv.seconds:
                    log.critical('sec: ' + str(cur_sec_val))
                    raise NameError('Seconds from an hour idx cannot be greater than ' + str(timelet_inv.seconds) + ' secs')

                # time slot index for a given weather sample time
                try:

                    for sample_idx, each_sample in enumerate(hour_samples):

                        # convert string to float if possible
                        try:
                            each_sample = float(each_sample)
                        except ValueError:
                            pass

                        sensor_name = sensor_name_list[sample_idx]

                        if sensor_name in data_dict:
                            if each_sample != 'N/A' and each_sample != []:
                                #data_dict[sensor_name][vtt_dt_idx].append(each_sample)
                                data_dict[sensor_name][0][slot_idx].append(
                                    cur_sec_val)
                                data_dict[sensor_name][1][slot_idx].append(
                                    each_sample)
                                data_dict[sensor_name][2][0].append(vtt_utc)
                                data_dict[sensor_name][2][1].append(
                                    each_sample)

                        else:
                            raise NameError(
                                'Inconsistency in the list of weather data')

                except ValueError:
                    slot_idx = -1

            # hour_sample is the header row of weather field names; discard it
            else:

                hour_sample = list()

    return sensor_name_list
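
The conversion above relies on module-level from_zone / to_zone that this listing never defines; a minimal sketch of how they are typically built with dateutil, where the local zone name is an assumption (VTT is in Finland).

from dateutil import tz
import datetime as dt

from_zone = tz.gettz('UTC')
to_zone = tz.gettz('Europe/Helsinki')   # assumed local zone for VTT

utc_dt = dt.datetime.strptime('2014-01-01 12:00:00', "%Y-%m-%d %H:%M:%S")
local_aware = utc_dt.replace(tzinfo=from_zone).astimezone(to_zone)
local_naive = dt.datetime(*(local_aware.timetuple()[:6]))  # offset-naive, as in the loop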
Example No. 9
def get_val_timelet(reading, t_slots, ans_start_t, ans_end_t, timelet_inv):

    data = dict()
    data['value'] = np.array([r[1] for r in reading], dtype=float)

    ts_list = list()
    for r in reading:
        local_dt = dt.datetime.fromtimestamp(r[0])
        time_tup = local_dt.timetuple()
        ts_list.append([
            local_dt, time_tup[5], time_tup[4], time_tup[3], time_tup[6],
            time_tup[2], time_tup[1]
        ])

    data['ts'] = np.array(ts_list)

    # Bail out when the reading produced no values at all
    if not len(data['value']):
        log.critical('Error in file reading: empty data. Skip and need to be purged from sensor list')
        return -1, -1, -1, -1

    if (len(data["ts"]) < MIN_NUM_VAL_FOR_FLOAT) or (len(data["value"]) < MIN_NUM_VAL_FOR_FLOAT):
        log.critical('No data included ' + str(data) + '... Skip and need to be purged from sensor list')
        return -1, -1, -1, -1

    nan_idx_list = np.nonzero(np.isnan(data["value"]))[0]
    sensor_val = np.delete(data["value"], nan_idx_list, axis=0)
    time_val = np.delete(data["ts"], nan_idx_list, axis=0)

    # Create the list of lists for value
    sensor_read = [[] for i in range(len(t_slots))]

    # Create the list of lists for seconds index
    stime_read = [[] for i in range(len(t_slots))]

    utc_t = []
    val = []

    for t_sample, v_sample in zip(time_val, sensor_val):
        temp_dt = t_sample[DT_IDX]

        if temp_dt < ans_start_t or temp_dt >= ans_end_t:
            continue

        try:
            idx = int(
                (temp_dt - ans_start_t).total_seconds() / timelet_inv.seconds)
            sensor_read[idx].append(v_sample)
            #secs=t_sample[MIN_IDX]*MIN+t_sample[SEC_IDX]
            secs = (temp_dt - t_slots[idx]).total_seconds()
            if secs >= timelet_inv.seconds:
                log.info('sec: ' + str(secs))
                raise NameError('Seconds from an hour idx cannot be greater than ' + str(timelet_inv.seconds) + ' secs')

            stime_read[idx].append(secs)

        except ValueError:
            idx = -1

        utc_temp = dtime_to_unix([t_sample[DT_IDX]])
        utc_t.append(utc_temp)
        val.append(v_sample)

    return sensor_read, stime_read, utc_t, val
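
The loop above indexes each ts row with module-level constants that this listing never shows; a sketch of plausible values, consistent with the row layout [local_dt, sec, min, hour, weekday, day, month] built here and with the commented-out secs formula (these exact values are assumptions).

DT_IDX = 0    # datetime object
SEC_IDX = 1   # seconds field of the timetuple
MIN_IDX = 2   # minutes field
HR_IDX = 3    # hours field
WD_IDX = 4    # weekday
DAY_IDX = 5   # day of month
MON_IDX = 6   # month
MIN = 60      # seconds per minute: secs = t_sample[MIN_IDX]*MIN + t_sample[SEC_IDX]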
Example No. 10
    def run(self):

        from log_util import log
        while True:

            cmd = None
            try:
                cmd = self.cmd_q.get(block=True, timeout=0.1)
            except Exception as e:
                continue

            if cmd:
                self.cmd_q.task_done()

                try:
                    with open(META_DIR + "wip.json", 'w') as f:
                        f.write(simplejson.dumps({"wip": 1}))

                    cmdset = simplejson.loads(cmd)
                    sensor_hash = cmdset['selected-nodes']
                    s_date = datetime.strptime(cmdset['start-date'],
                                               '%Y-%m-%d')
                    e_date = datetime.strptime(cmdset['end-date'], '%Y-%m-%d')

                    if not len(sensor_hash):
                        log.critical("No sensor is selected!")
                    else:

                        log.info('****************************** Beginning of DDEA *******************************')

                        bldg_key = 'SDH'
                        # exemplar power-sensor name key chosen by the user
                        #pname_key = '_POWER_'
                        pname_key = 'POWER'

                        s_epoch = int(time.mktime(s_date.timetuple()))
                        e_epoch = int(time.mktime(e_date.timetuple()))
                        time_inv = dt.timedelta(
                            seconds=cmdset['time-interval'])

                        log.info("Cleaning up old output...")

                        mt.remove_all_files(FIG_DIR)
                        mt.remove_all_files(JSON_DIR)
                        mt.remove_all_files(PROC_OUT_DIR)

                        log.info("start epoch : " + str(s_epoch) +
                                 " end epoch : " + str(e_epoch))
                        log.info(
                            str(time_inv) +
                            ' time slot interval is set for this data set !!!')
                        log.info("BLDG_KEY : " + bldg_key + " PNAME_KEY : " +
                                 pname_key)
                        log.info('*' * 80)

                        log.info("Retrieve sensor data from quasar TSDB")

                        sensor_names_hash = mt.sensor_name_uid_dict(
                            bldg_key, sensor_hash)

                        sensor_data = read_sensor_data(sensor_names_hash,
                                                       s_epoch, e_epoch)

                        if sensor_data and len(sensor_data):
                            ddea_process(sensor_names_hash, sensor_data,
                                         s_epoch, e_epoch, time_inv, bldg_key,
                                         pname_key)
                        else:
                            log.critical("No sensor data available for time period and sensor selected!")

                        log.info('******************************** End of DDEA **********************************')

                except Exception as e:

                    log.error(traceback.format_exc())
                    log.error(str(e))

                finally:
                    os.remove(META_DIR + "wip.json")
                    cmd_lock.clear()

                    log.info("execution-lock cleared")
                    log.info('~' * 80)
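
The worker signals work-in-progress through the wip.json sentinel it writes and removes above; a minimal sketch of a hypothetical helper a front end could use to poll it.

import os
import simplejson

def ddea_is_busy(meta_dir):
    # True while a DDEA pass is executing: run() writes {"wip": 1} at start
    # and removes the file in its finally block.
    path = meta_dir + "wip.json"
    if not os.path.exists(path):
        return False
    with open(path) as f:
        return bool(simplejson.loads(f.read()).get("wip"))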