Example #1
def generate_file(con, start_shift, end_shift, output_file, enable_regression):
    logging.info('start: ' + output_file)

    graphs = Graph("./../../src/graph")

    # download data
    storage = Storage('examples/events_peto.json', 0, 'measured_filtered_peto')
    d = storage.load_data(con, start_shift, end_shift, 'co2_in_ppm')
    logging.info('downloaded events: %d' % len(d))

    # apply filters to events
    filtered = FilterUtil.only_valid_events(d)

    # for travis
    if ConnectionUtil.is_testable_system():
        filtered = filtered[:ConnectionUtil.MAX_TESTABLE_EVENTS]

    logging.info('events after applying the filter: %d' % len(filtered))

    # compute regression
    if enable_regression:
        filtered = compute_regression(filtered)

    logging.info('start generating graphs')
    gr = []
    for event in filtered:
        t = DateTimeUtil.utc_timestamp_to_str(event['e_start']['timestamp'], '%d.%m. %H:%M:%S')
        t += ' - '
        t += DateTimeUtil.utc_timestamp_to_str(event['e_end']['timestamp'], '%H:%M:%S')

        if enable_regression:
            gg = [
                Graph.db_to_simple_graph(event, 'co2_in_ppm', 'green', 'CO2', 50),
                Graph.db_to_simple_graph(event, 'co2_in_ppm_exp', 'red', 'SimpleExpRegression', 50),
                Graph.db_to_simple_graph(event, 'co2_in_ppm_exp2', 'orange', 'ExpRegressionWithDelay', 50),
            ]
        else:
            gg = [
                Graph.db_to_simple_graph(event, 'co2_in_ppm', 'green', 'CO2', 50),
            ]

        g = {
            'title': t,
            'graphs': gg
        }
        gr.append(g)

    graphs.gen(gr, output_file + '.html', 0, 0)
    logging.info('end generating graphs')

    logging.info('end')
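
The DateTimeUtil.utc_timestamp_to_str helper used for the graph titles above is project-specific and not shown; a minimal stand-in built on the standard library, assuming it simply formats a UTC unix timestamp with a strftime pattern, might look like this:

from datetime import datetime, timezone

def utc_ts_to_str(ts, fmt):
    # format a UTC unix timestamp using the given strftime pattern
    return datetime.fromtimestamp(ts, tz=timezone.utc).strftime(fmt)

# hypothetical timestamps, for illustration only
start, end = 1541030460, 1541031300
title = utc_ts_to_str(start, '%d.%m. %H:%M:%S') + ' - ' + utc_ts_to_str(end, '%H:%M:%S')
print(title)  # 01.11. 00:01:00 - 00:15:00
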
Example #2
def training_set(events_file: str, no_event_time_shift: int, table_name: str,
                 directory):
    logging.info('start')

    # download data
    con = ConnectionUtil.create_con()
    storage = Storage(events_file, no_event_time_shift, table_name)
    d = storage.load_data(con, 0, 0, 'co2_in_ppm')
    logging.info('downloaded events: %d' % len(d))

    # apply filters to events
    filtered = FilterUtil.only_valid_events(d)

    # for travis
    no_ev_records = no_events_records
    if ConnectionUtil.is_testable_system():
        filtered = filtered[:ConnectionUtil.MAX_TESTABLE_EVENTS]
        no_ev_records = no_events_records[:ConnectionUtil.MAX_TESTABLE_EVENTS]

    logging.info('events after applying the filter: %d' % len(filtered))

    # selector for data
    row_selector = CachedDiffRowWithIntervalSelector(con, table_name, 0, 0)
    interval_selector = None

    # training set
    logging.info('start computing of training set')
    training, tr_events = AttributeUtil.cached_training_data(
        con, table_name, filtered, func, row_selector, interval_selector,
        'open', '{0}/training_cached.csv'.format(directory))
    count = len(training)
    logging.info('training set contains %d events (%d records)' %
                 (count // 2, count))

    GraphUtil.gen_duration_histogram(tr_events, 'save', ['png'],
                                     'Histogram of ventilation durations',
                                     [x for x in range(5, 60, 5)], 1)

    training2 = AttributeUtil.additional_training_set(con, table_name,
                                                      no_ev_records, func,
                                                      row_selector,
                                                      interval_selector)
    count2 = len(training2)
    logging.info('additional training set contains %d records' % count2)

    logging.info('end computing of training set')

    logging.info('start preparing file of training set')
    balanced = AttributeUtil.balance_set(training, training2)
    CSVUtil.create_csv_file(balanced, '{0}/training.csv'.format(directory))
    logging.info('end preparing file of training set')
Example #3
def training_set(events_file: str, no_event_time_shift: int, table_name: str):
    logging.info('start')

    # download data
    con = ConnectionUtil.create_con()
    storage = Storage(events_file, no_event_time_shift, table_name)
    d = storage.load_data(con, 0, 0, 'rh_in2_specific_g_kg')
    logging.info('downloaded events: %d' % len(d))

    # apply filters to data
    filtered = FilterUtil.only_valid_events(d)
    # filtered = FilterUtil.temperature_diff(filtered, 5, 100)
    # filtered = FilterUtil.temperature_out_max(filtered, 15)
    # filtered = FilterUtil.humidity(filtered, 6, 1.6, 100)

    # for travis
    no_ev_records = no_events_records
    if ConnectionUtil.is_testable_system():
        filtered = filtered[:ConnectionUtil.MAX_TESTABLE_EVENTS]
        no_ev_records = no_events_records[:ConnectionUtil.MAX_TESTABLE_EVENTS]

    logging.info('events after applying the filter: %d' % len(filtered))

    row_selector = CachedDiffRowWithIntervalSelector(con, table_name, 0, 0)
    interval_selector = SimpleIntervalSelector(con, table_name)

    logging.info('start computing of training set')
    training, tr_events = AttributeUtil.training_data(con, table_name,
                                                      filtered, func,
                                                      row_selector,
                                                      interval_selector,
                                                      'open')
    count = len(training)
    logging.info('training set contains %d events (%d records)' %
                 (count // 2, count))

    training2 = AttributeUtil.additional_training_set(con, table_name,
                                                      no_ev_records, func,
                                                      row_selector,
                                                      interval_selector)
    count2 = len(training2)
    logging.info('additional training set contains %d records' % count2)

    logging.info('end computing of training set')

    logging.info('start preparing file of training set')
    balanced = AttributeUtil.balance_set(training, training2)
    CSVUtil.create_csv_file(balanced, 'training.csv')
    logging.info('end preparing file of training set')
Example #4
    def row(self, column_name, time):
        if column_name not in self.cache:
            self.cache[column_name] = {}
            res = Storage.select_interval(self.con,
                                          self.start,
                                          self.end,
                                          column_name,
                                          self.table_name,
                                          without_none_value=False)

            actual_timestamp = self.start
            for row in res:
                if row is None:
                    self.cache[column_name][actual_timestamp] = None
                else:
                    self.cache[column_name][actual_timestamp] = float(row)
                actual_timestamp += 1

        if time in self.cache[column_name]:
            value = self.cache[column_name][time]
        else:
            value = super(CachedRowWithIntervalSelector,
                          self).row(column_name, time)

        if value is None:
            t = DateTimeUtil.utc_timestamp_to_str(time, '%Y/%m/%d %H:%M:%S')
            raise ValueError('empty value at %s' % t)
        return value
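
The caching in row() above assumes the table holds one row per second: the interval result is mapped onto consecutive timestamps starting at self.start. A standalone sketch of that mapping, with a made-up result list standing in for Storage.select_interval:

def interval_to_cache(res, start):
    # map an interval result (one value per second) onto
    # {timestamp: float or None}, as the cache fill above does
    cache = {}
    ts = start
    for value in res:
        cache[ts] = None if value is None else float(value)
        ts += 1
    return cache

print(interval_to_cache([415.0, None, 420.5], start=1541030400))
# {1541030400: 415.0, 1541030401: None, 1541030402: 420.5}
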
Example #5
    def row(self, column_name, time):
        res = Storage.one_row(self.con, self.table_name, column_name, time)

        if res is None or res[0] is None:
            t = DateTimeUtil.utc_timestamp_to_str(time, '%Y/%m/%d %H:%M:%S')
            raise ValueError('empty value at %s' % t)

        return float(res[0])
Example #6
def training_set(events_file: str, no_event_time_shift: int, table_name: str):
    logging.info('start')

    # download data
    con = ConnectionUtil.create_con()
    storage = Storage(events_file, no_event_time_shift, table_name)
    d = storage.load_data(con, 0, 0, 'co2_in_ppm')
    logging.info('downloaded events: %d' % len(d))

    # apply filters to events
    filtered = FilterUtil.only_valid_events(d)

    # for travis
    if ConnectionUtil.is_testable_system():
        filtered = filtered[:ConnectionUtil.MAX_TESTABLE_EVENTS]

    logging.info('events after applying the filter: %d' % len(filtered))

    # selector for data
    row_selector = CachedDiffRowWithIntervalSelector(con, table_name, 0, 0)
    interval_selector = None

    # data set
    logging.info('start computing of data set')
    data = AttributeUtil.training_data_without_opposite(
        con, table_name, filtered, func, row_selector, interval_selector)
    logging.info('data set contains %d events' % len(data))
    logging.info('end computing of data set')

    # generate files
    logging.info('start preparing file of training and testing set')
    random.seed(len(data) // 2)
    random.shuffle(data)

    CSVUtil.create_csv_file(data, 'data.csv')
    logging.info('end preparing file of training and testing set')

    logging.info('end')
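
Seeding the random generator with a value derived from the data itself (len(data) // 2 above) makes the shuffle reproducible: the same data set always yields the same ordering in the generated file. A minimal illustration:

import random

data = list(range(10))
random.seed(len(data) // 2)  # same seed whenever the input has the same size
random.shuffle(data)
print(data)  # identical order on every run
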
Example #7
    def row(self, column_name, time):
        if column_name not in self.cache:
            self.cache[column_name] = {}

        value = None
        if time in self.cache[column_name]:
            value = self.cache[column_name][time]
        else:
            res = Storage.one_row(self.con, self.table_name, column_name, time)

            if res is not None and res[0] is not None:
                self.cache[column_name][time] = float(res[0])
                value = float(res[0])

        if value is None:
            t = DateTimeUtil.utc_timestamp_to_str(time, '%Y/%m/%d %H:%M:%S')
            raise ValueError('empty value at %s' % t)

        return value
Example #8
    def row(self, column_name, time):
        if column_name not in self.cache:
            self.cache[column_name] = {}

        if time in self.cache[column_name]:
            value = self.cache[column_name][time]
        else:
            start = time - self.half_window_size
            end = time + self.half_window_size
            res = Storage.select_interval(self.con, start, end, column_name, self.table_name,
                                          without_none_value=False)

            if res is None or None in res:
                self.cache[column_name][time] = None
                t = DateTimeUtil.utc_timestamp_to_str(time, '%Y/%m/%d %H:%M:%S')
                raise ValueError('empty value at %s' % t)

            x = []
            y = []
            for i in range(0, len(res)):
                x.append(i)
                y.append(res[i])

            slope, intercept, _, _, _ = stats.linregress(x, y)

            value = intercept + slope * self.half_window_size
            self.cache[column_name][time] = value

        if value is None:
            t = DateTimeUtil.utc_timestamp_to_str(time, '%Y/%m/%d %H:%M:%S')
            raise ValueError('empty value at %s' % t)

        return value
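
The selector above smooths a noisy reading by fitting a straight line to the window of samples around it and evaluating the fit at the window centre (intercept + slope * half_window_size). A self-contained sketch of the same technique on made-up data:

import numpy as np
from scipy import stats

def smoothed_center(samples):
    # fit y = intercept + slope * x over the window and return
    # the fitted value at the centre index
    x = np.arange(len(samples))
    slope, intercept, _, _, _ = stats.linregress(x, samples)
    return intercept + slope * (len(samples) // 2)

rng = np.random.default_rng(0)
noisy = 400 + 0.5 * np.arange(11) + rng.normal(0, 2, 11)  # fake CO2-like window
print(smoothed_center(noisy))  # close to the true centre value of 402.5
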
Example #9
    def prepare_derivation_lin_reg(con, events: list, intervals_before: list,
                                   intervals_after: list, table_name: str,
                                   precision: int, derivation_attr_name: str,
                                   lin_window_size: tuple):
        for i in range(0, len(events)):
            event = events[i]

            no_event_shift = event['no_event_time_shift']
            start = event['e_start']['timestamp']

            values1 = Storage.select_interval(con, start - lin_window_size[0],
                                              start + lin_window_size[1],
                                              derivation_attr_name, table_name)
            values2 = Storage.select_interval(
                con, start + no_event_shift - lin_window_size[0],
                start + no_event_shift + lin_window_size[1],
                derivation_attr_name, table_name)

            if not values2 or not values1:
                event['valid_event'] = False
                continue

            open_value = lin_reg(values1, lin_window_size[0] - 1)
            no_event_open_value = lin_reg(values2, lin_window_size[0] - 1)

            if no_event_open_value is None:
                t = DateTimeUtil.utc_timestamp_to_str(start + no_event_shift,
                                                      '%Y-%m-%d %H:%M:%S')
                logging.warning('no_event value is None: %s' % t)

            # derivation before the window opening
            # generates derivations between the window-opening value and a value
            # in the past, shifted back by the given interval
            for interval in intervals_before:
                value_time = start - interval
                v = Storage.select_interval(con,
                                            value_time - lin_window_size[0],
                                            value_time + lin_window_size[1],
                                            derivation_attr_name, table_name)

                if not v:
                    event['valid_event'] = False
                    break

                value = lin_reg(v, lin_window_size[0] - 1)

                derivation = None
                if open_value is not None and value is not None:
                    derivation = round((open_value - value) / interval,
                                       precision)

                event['derivation']['before'].append(derivation)

            # derivation after the window opening
            # generates derivations between the window-opening value and a value
            # in the future, shifted forward by the given interval
            for interval in intervals_after:
                value_time = start + interval
                v = Storage.select_interval(con,
                                            value_time - lin_window_size[0],
                                            value_time + lin_window_size[1],
                                            derivation_attr_name, table_name)

                if not v:
                    event['valid_event'] = False
                    break

                value = lin_reg(v, lin_window_size[0] - 1)

                derivation = None
                if open_value is not None and value is not None:
                    derivation = round((value - open_value) / interval,
                                       precision)

                event['derivation']['after'].append(derivation)

            # derivation before no_event
            # generates derivations between the no-event value and a value
            # in the past, shifted back by the given interval;
            # this time is additionally shifted by the event's no-event shift
            for interval in intervals_before:
                value_time = start + no_event_shift - interval
                v = Storage.select_interval(con,
                                            value_time - lin_window_size[0],
                                            value_time + lin_window_size[1],
                                            derivation_attr_name, table_name)

                if not v:
                    event['valid_event'] = False
                    break

                value = lin_reg(v, lin_window_size[0] - 1)

                derivation = None
                if value is not None and no_event_open_value is not None:
                    derivation = round(
                        (no_event_open_value - value) / interval, precision)
                else:
                    event['valid_event'] = False

                event['derivation']['no_event_before'].append(derivation)

            # derivation after no_event
            # generates derivations between the no-event value and a value
            # in the future, shifted forward by the given interval;
            # this time is additionally shifted by the event's no-event shift
            for interval in intervals_after:
                value_time = start + no_event_shift + interval
                v = Storage.select_interval(con,
                                            value_time - lin_window_size[0],
                                            value_time + lin_window_size[1],
                                            derivation_attr_name, table_name)

                if not v:
                    event['valid_event'] = False
                    break

                value = lin_reg(v, lin_window_size[0] - 1)

                derivation = None
                if value is not None and no_event_open_value is not None:
                    derivation = round(
                        (value - no_event_open_value) / interval, precision)
                else:
                    event['valid_event'] = False

                event['derivation']['no_event_after'].append(derivation)

            event['derivation']['intervals_before'] = intervals_before
            event['derivation']['intervals_after'] = intervals_after
            event['derivation']['intervals_no_event_before'] = intervals_before
            event['derivation']['intervals_no_event_after'] = intervals_after
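
Each "derivation" above is a finite difference: the (smoothed) value at the event start minus the value shifted by interval seconds, divided by interval. A standalone sketch with a hypothetical value_at function standing in for the lin_reg/Storage machinery:

def derivations_before(value_at, start, intervals, precision):
    # finite differences between the opening value and values shifted
    # back by each interval, mirroring the intervals_before loop above
    open_value = value_at(start)
    return [round((open_value - value_at(start - i)) / i, precision)
            for i in intervals]

value_at = lambda t: 400 + 0.1 * t  # hypothetical smooth signal
print(derivations_before(value_at, 1000, [10, 60, 120], 4))
# [0.1, 0.1, 0.1] -- the constant slope of the fake signal
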
Example #10
    title = 'window_size:{0},threshold:{1}'.format(window_size, threshold)
    gen_graph(events_delays, action, extensions, title)

    logger.disabled = False


if __name__ == '__main__':
    logging.basicConfig(level=logging.DEBUG,
                        format='%(asctime)s %(levelname)s %(message)s')

    logging.info('start')
    table_name = 'measured_filtered_peto'

    # download data
    con = ConnectionUtil.create_con()
    storage = Storage('examples/events_peto.json', 0, table_name)
    d = storage.load_data(con, 0, 0, 'co2_in_ppm')
    logging.info('downloaded events: %d' % len(d))

    # apply filters to events
    filtered = FilterUtil.only_valid_events(d)

    # for travis
    if ConnectionUtil.is_testable_system():
        filtered = filtered[:ConnectionUtil.MAX_TESTABLE_EVENTS]

    logging.info('events after applying the filter: %d' % len(filtered))

    extensions = ['eps']
    delays(filtered, extensions, ['save'], 11, 15)
    delays(filtered, extensions, ['save'], 16, 10)
Example #11
def main(events_file: str, owner: str, start_shift: int, end_shift: int,
         output_filename: str, number_output_records: int):
    """

    :param events_file: path to file containing list of events
    :param owner: sensor owner(klarka|peto), name must be the same as in database
    :param start_shift: shift of beginning of data downloading
    :param end_shift: shift of end of data downloading
    :param output_filename: filename to store a graph
    :param number_output_records: number of points that are required in graph
    :return:
    """

    logging.info('start: ' + output_filename)
    graphs = Graph("./../../src/graph")

    # download data
    con = ConnectionUtil.create_con()
    storage = Storage(events_file, 0, 'measured_' + owner)
    d = storage.load_data(con, start_shift, end_shift,
                          'temperature_in_celsius')
    logging.info('downloaded events: %d' % len(d))

    # apply filters to downloaded data
    filtered = FilterUtil.only_valid_events(d)
    filtered = FilterUtil.temperature_diff(filtered, 5, 100)
    filtered = FilterUtil.temperature_out_max(filtered, 15)
    filtered = FilterUtil.humidity(filtered, 6, 1.6, 100)

    # for travis
    if ConnectionUtil.is_testable_system():
        filtered = filtered[:ConnectionUtil.MAX_TESTABLE_EVENTS]

    if owner == 'klarka':
        filtered = FilterUtil.attribute(filtered, 'window', 'dokoran')

    logging.info('events after applying the filter: %d' % len(filtered))

    # data for graph generation measured using sensor 1
    sensor1_events = filtered
    logging.info('event count: %d for sensor 1' % len(sensor1_events))

    # data for graph generation measured using sensor 2
    sensor2 = [
        'rh_in2_percentage', 'rh_in2_specific_g_kg', 'rh_in2_absolute_g_m3'
    ]
    sensor2_events = FilterUtil.measured_values_not_empty(filtered, sensor2)
    logging.info('event count: %d for sensor 2' % len(sensor2_events))

    # graph generation - sensor 1
    logging.info('start generating graphs of events from sensor 1')
    graphs_sensor_1 = []
    for event in sensor1_events:
        graphs_sensor_1 += generate_graphs_sensor_1(event, owner,
                                                    number_output_records)

    graphs.gen(graphs_sensor_1, 'sensor1_' + output_filename, 0, 0)
    logging.info('end generating graphs of events from sensor 1')

    # graph generation - sensor 2
    logging.info('start generating graphs of events from sensor 2')
    graphs_sensor_2 = []
    for event in sensor2_events:
        graphs_sensor_2 += generate_graphs_sensor_2(event, owner,
                                                    number_output_records)

    graphs.gen(graphs_sensor_2, 'sensor2_' + output_filename, 0, 0)
    logging.info('end generating graphs of events from sensor 2')

    logging.info('end')
Example #12
def generate_file(con, start_shift, end_shift, output_file):
    logging.info('start: ' + output_file)

    graphs = Graph("./../../src/graph")

    # download data
    storage = Storage('examples/events_klarka_shower.json', 0, 'measured_klarka_shower')
    d = storage.load_data(con, start_shift, end_shift, 'temperature_in_celsius')
    logging.info('downloaded events: %d' % len(d))

    # apply filters to events
    filtered = FilterUtil.only_valid_events(d)

    # for travis
    if ConnectionUtil.is_testable_system():
        filtered = filtered[:ConnectionUtil.MAX_TESTABLE_EVENTS]

    logging.info('events after applying the filter: %d' % len(filtered))

    fill_start_end(filtered)

    logging.info('start generating graphs')
    gr = []
    for event in filtered:
        t = DateTimeUtil.utc_timestamp_to_str(event['e_start']['timestamp'], '%d.%m. %H:%M:%S')
        t += ' - '
        t += DateTimeUtil.utc_timestamp_to_str(event['e_end']['timestamp'], '%H:%M:%S')

        g = {
            'title': t,
            'group': 'one',
            'graphs': [
                Graph.db_to_simple_graph(event, 'temperature_in_celsius', 'blue',
                                         'Temperature', 75),
                Graph.db_to_simple_graph(event, 'open_close', 'orange', 'Open', 75),
            ]
        }
        gr.append(g)

        g = {
            'title': t,
            'group': 'two',
            'graphs': [
                Graph.db_to_simple_graph(event, 'rh_in_percentage', 'red',
                                         'Relative humidity [%]', 75),
                Graph.db_to_simple_graph(event, 'open_close', 'orange', 'Open', 75),
            ]
        }
        gr.append(g)

        g = {
            'title': t,
            'group': 'three',
            'graphs': [
                Graph.db_to_simple_graph(event, 'rh_in_absolute_g_m3', 'green',
                                         'Absolute humidity [g/m3]', 75),
                Graph.db_to_simple_graph(event, 'open_close', 'orange', 'Open', 75),
            ]
        }
        gr.append(g)

        g = {
            'title': t,
            'group': 'four',
            'graphs': [
                Graph.db_to_simple_graph(event, 'rh_in_specific_g_kg', 'purple',
                                         'Specific humidity [g/kg]', 75),
                Graph.db_to_simple_graph(event, 'open_close', 'orange', 'Open', 75),
            ]
        }
        gr.append(g)

    graphs.gen(gr, output_file + '.html', 0, 0, global_range=True)
    logging.info('end generating graphs')
Example #13
    def interval(self, column_name, start, end):
        return Storage.select_interval(self.con, start, end, column_name,
                                       self.table_name)
Example #14
    def testing_data(con,
                     table_name,
                     start,
                     end,
                     write_each,
                     func,
                     row_selector,
                     interval_selector,
                     event_type,
                     log_every_hour=3):
        """Generation of testing data.

        :param con:
        :param table_name: table name
        :param start: interval from which testing data is generated
        :param end: interval to which testing data is generated
        :param write_each:
        :param func:
        :param row_selector:
        :param interval_selector:
        :return:
        """

        attrs = []
        bad_open_type_events = []
        global DATA_CACHE

        act_row = None
        for t in range(start, end):
            if t % (log_every_hour * 3600) == 0:
                logging.debug(DateTimeUtil.utc_timestamp_to_str(t))

            # reuse the row fetched in the previous iteration when possible
            if act_row is None:
                previous_row = Storage.one_row(con, table_name, 'open_close',
                                               t - 1)
            else:
                previous_row = act_row
            act_row = Storage.one_row(con, table_name, 'open_close', t)

            if event_type not in ['open', 'close']:
                raise ValueError('event type must be: open or close')

            if previous_row is None or act_row is None:
                continue

            open_state = 'nothing'
            if (event_type == 'open' and previous_row[0] == 0
                    and act_row[0] == 1):
                open_state = event_type
            elif (event_type == 'close' and previous_row[0] == 1
                    and act_row[0] == 0):
                open_state = event_type

            if open_state == 'nothing' and t % write_each != 0:
                continue

            try:
                DATA_CACHE = func(con, table_name, t, row_selector,
                                  interval_selector)
            except Exception as e:
                # logging.error(str(e))

                if open_state in ['open', 'close']:
                    bad_open_type_events.append(t)
                continue

            time = DateTimeUtil.utc_timestamp_to_str(t, '%Y/%m/%d %H:%M:%S')
            DATA_CACHE.insert(0, ('datetime', time))
            DATA_CACHE.insert(1, ('event', open_state))
            DATA_CACHE.append(('valid', 'yes'))
            attrs.append(OrderedDict(DATA_CACHE))

        if DATA_CACHE is None:
            logging.warning('no attributes were computed, skipped {0} events '
                            'cannot be recorded'.format(event_type))
        else:
            tmp = {}
            for item in DATA_CACHE:
                key = item[0]
                tmp[key] = None

            tmp['event'] = event_type
            tmp['valid'] = 'no'
            for timestamp in bad_open_type_events:
                tmp['datetime'] = DateTimeUtil.utc_timestamp_to_str(
                    timestamp, '%Y/%m/%d %H:%M:%S')
                attrs.append(OrderedDict(tmp))

        return attrs
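
The event detection above compares the binary open_close column at two consecutive seconds: a 0 -> 1 transition counts as an 'open' event, 1 -> 0 as a 'close'. A minimal sketch of the same rule over an in-memory signal:

def detect(signal, event_type):
    # yield indices where the 0/1 signal changes in the requested direction
    pattern = (0, 1) if event_type == 'open' else (1, 0)
    for t in range(1, len(signal)):
        if (signal[t - 1], signal[t]) == pattern:
            yield t

sig = [0, 0, 1, 1, 1, 0, 0, 1]
print(list(detect(sig, 'open')))   # [2, 7]
print(list(detect(sig, 'close')))  # [5]
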
Example #15
def main(events_file: str, start_shift: int, end_shift: int,
         output_filename: str, output_records: int):
    logging.info('start')
    graphs = Graph("./../../src/graph")

    # download data
    con = ConnectionUtil.create_con()
    storage = Storage(events_file, 0, 'measured_klarka')
    d = storage.load_data(con, start_shift, end_shift,
                          'temperature_in_celsius')
    logging.info('downloaded events: %d' % len(d))

    # apply filters to events
    filtered = FilterUtil.only_valid_events(d)
    filtered = FilterUtil.temperature_diff(filtered, 5, 100)
    filtered = FilterUtil.temperature_out_max(filtered, 15)
    filtered = FilterUtil.humidity(filtered, 6, 1.6, 100)

    min_timestamp = int(
        DateTimeUtil.local_time_str_to_utc('2018/11/01 00:01:00').timestamp())
    filtered = FilterUtil.min_timestamp(filtered, min_timestamp)

    filtered = FilterUtil.min_max_time_interval(filtered, 1440, 1620)

    # for travis
    if ConnectionUtil.is_testable_system():
        filtered = filtered[:ConnectionUtil.MAX_TESTABLE_EVENTS]

    logging.info('events after applying the filter: %d' % len(filtered))

    # data for graph generation measured using sensor 1
    sensor1_events = filtered
    logging.info('event count: %d for sensor 1' % len(sensor1_events))
    linear_reg(sensor1_events, 'rh_in_specific_g_kg', 'linear1_sh')
    linear_reg(sensor1_events, 'rh_in_absolute_g_m3', 'linear1_ah')
    linear_reg(sensor1_events, 'temperature_in_celsius', 'linear1_temp')

    # graph generation - sensor 1
    logging.info('start generating graphs of events from sensor 1')
    graphs_sensor_1 = []
    for event in sensor1_events:
        graphs_sensor_1 += gen_graphs(event, output_records, [
            'rh_in_specific_g_kg', 'rh_in_absolute_g_m3',
            'temperature_in_celsius'
        ], ['linear1_sh', 'linear1_ah', 'linear1_temp'])

    graphs.gen(graphs_sensor_1,
               'sensor1_' + output_filename,
               0,
               0,
               global_range=True)
    logging.info('end generating graphs of events from sensor 1')

    # data for graph generation measured using sensor 2
    sensor2_events = filtered
    logging.info('event count: %d for sensor 2' % len(sensor2_events))

    sensor2_events = FilterUtil.measured_values_not_empty(
        sensor2_events, 'rh_in2_specific_g_kg')
    sensor2_events = FilterUtil.measured_values_not_empty(
        sensor2_events, 'rh_in2_absolute_g_m3')
    sensor2_events = FilterUtil.measured_values_not_empty(
        sensor2_events, 'temperature_in2_celsius')
    logging.info('events after applying the filter: %d' % len(sensor2_events))

    linear_reg(sensor2_events, 'rh_in2_specific_g_kg', 'linear2_sh')
    linear_reg(sensor2_events, 'rh_in2_absolute_g_m3', 'linear2_ah')
    linear_reg(sensor2_events, 'temperature_in2_celsius', 'linear2_temp')

    humidity_info_csv(sensor2_events, start_shift, end_shift)

    # graph generation - sensor 2
    logging.info('start generating graphs of events from sensor 2')
    graphs_sensor_2 = []
    for event in sensor2_events:
        graphs_sensor_2 += gen_graphs(event, output_records, [
            'rh_in2_specific_g_kg', 'rh_in2_absolute_g_m3',
            'temperature_in2_celsius'
        ], ['linear2_sh', 'linear2_ah', 'linear2_temp'])

    graphs.gen(graphs_sensor_2,
               'sensor2_' + output_filename,
               0,
               0,
               global_range=True)
    logging.info('end generating graphs of events from sensor 2')

    logging.info('end')
Example #16
    def prepare_derivation(con, events: list, intervals_before: list,
                           intervals_after: list, table_name: str,
                           precision: int, derivation_attr_name: str,
                           derivation_index: int):
        for i in range(0, len(events)):
            event = events[i]

            no_event_shift = event['no_event_time_shift']
            start = event['e_start']['timestamp']
            open_value = event['measured'][derivation_attr_name][0]
            no_event_open_value = event['no_event_values'][derivation_index]

            if no_event_open_value is None:
                t = DateTimeUtil.utc_timestamp_to_str(start + no_event_shift,
                                                      '%Y-%m-%d %H:%M:%S')
                logging.warning('no_event value is None: %s' % t)

            # derivation before the window opening
            # generates derivations between the window-opening value and a value
            # in the past, shifted back by the given interval
            for interval in intervals_before:
                value_time = start - interval
                value = Storage.one_row(con, table_name, derivation_attr_name,
                                        value_time)

                derivation = None
                if value is not None and value[0] is not None:
                    derivation = round(
                        (open_value - float(value[0])) / interval, precision)

                event['derivation']['before'].append(derivation)

            # derivation after the window opening
            # generates derivations between the window-opening value and a value
            # in the future, shifted forward by the given interval
            for interval in intervals_after:
                value_time = start + interval
                value = Storage.one_row(con, table_name, derivation_attr_name,
                                        value_time)

                derivation = None
                if value is not None and value[0] is not None:
                    derivation = round(
                        (float(value[0]) - open_value) / interval, precision)

                event['derivation']['after'].append(derivation)

            # derivation before no_event
            # generates derivations between the no-event value and a value
            # in the past, shifted back by the given interval;
            # this time is additionally shifted by the event's no-event shift
            for interval in intervals_before:
                value_time = start + no_event_shift - interval
                value = Storage.one_row(con, table_name, derivation_attr_name,
                                        value_time)

                derivation = None
                if (value is not None and value[0] is not None
                        and no_event_open_value is not None):
                    derivation = round(
                        (float(no_event_open_value) - float(value[0])) /
                        interval, precision)
                else:
                    event['valid_event'] = False

                event['derivation']['no_event_before'].append(derivation)

            # derivation after no_event
            # generates derivations between the no-event value and a value
            # in the future, shifted forward by the given interval;
            # this time is additionally shifted by the event's no-event shift
            for interval in intervals_after:
                value_time = start + no_event_shift + interval
                value = Storage.one_row(con, table_name, derivation_attr_name,
                                        value_time)

                derivation = None
                if (value is not None and value[0] is not None
                        and no_event_open_value is not None):
                    derivation = round(
                        (float(value[0]) - float(no_event_open_value)) /
                        interval, precision)
                else:
                    event['valid_event'] = False

                event['derivation']['no_event_after'].append(derivation)

            event['derivation']['intervals_before'] = intervals_before
            event['derivation']['intervals_after'] = intervals_after
            event['derivation']['intervals_no_event_before'] = intervals_before
            event['derivation']['intervals_no_event_after'] = intervals_after

        return events
Example #17
    # pad the axis limits around the data
    ax.set_ylim(min(y1) - 50, max(y1) + 50)
    ax.set_xlim(dt.datetime.fromtimestamp(raw_t[0] - 5), dt.datetime.fromtimestamp(raw_t[-1] + 5))

    filename = simple_graph.__name__ + '.eps'
    fig.canvas.set_window_title(filename)

    # make sure the X-axis label is shown even with a small figsize
    plt.tight_layout()

    fig.savefig(filename, bbox_inches='tight', pad_inches=0)


if __name__ == '__main__':
    logging.basicConfig(level=logging.DEBUG,
                        format='%(asctime)s %(levelname)s %(message)s')

    con = ConnectionUtil.create_con()

    start = int(DateTimeUtil.local_time_str_to_utc('2018/10/07 06:00:00').timestamp())
    end = int(DateTimeUtil.local_time_str_to_utc('2018/10/07 09:00:00').timestamp())
    table_name = 'measured_filtered_peto'

    rows = Storage.dw_columns_ordered(con, start, end, 'measured_time,co2_in_ppm', table_name)
    CSVUtil.create_csv_file(rows, 'test.csv')

    simple_graph('test.csv')

    # plt.show()
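
A self-contained sketch of the save pattern used in simple_graph above: pad the y-limits by a fixed margin, call tight_layout() so the axis label survives a small figsize, and save without surrounding padding. Data and filename are placeholders:

import matplotlib
matplotlib.use('Agg')  # headless backend; the figure is only saved to a file
import matplotlib.pyplot as plt

y = [400, 425, 415, 460, 440]
fig, ax = plt.subplots(figsize=(4, 2))
ax.plot(range(len(y)), y)
ax.set_ylim(min(y) - 50, max(y) + 50)  # same +-50 padding as above
ax.set_xlabel('sample')
plt.tight_layout()  # keep the x-axis label visible at small sizes
fig.savefig('sketch.eps', bbox_inches='tight', pad_inches=0)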