def generate_file(con, start_shift, end_shift, output_file, enable_regression):
    """Generate an HTML file with CO2 graphs for every valid event.

    :param con: database connection
    :param start_shift: shift applied to each event start
    :param end_shift: shift applied to each event end
    :param output_file: name of the generated file (without extension)
    :param enable_regression: if True, regression curves are computed and drawn
    """
    logging.info('start: ' + output_file)
    graphs = Graph("./../../src/graph")

    # download data
    storage = Storage('examples/events_peto.json', 0, 'measured_filtered_peto')
    d = storage.load_data(con, start_shift, end_shift, 'co2_in_ppm')
    logging.info('downloaded events: %d' % len(d))

    # apply filters to the events
    filtered = FilterUtil.only_valid_events(d)

    # for travis
    if ConnectionUtil.is_testable_system():
        filtered = filtered[:ConnectionUtil.MAX_TESTABLE_EVENTS]

    logging.info('events after applying the filter: %d' % len(filtered))

    # compute the regression curves when requested
    if enable_regression:
        filtered = compute_regression(filtered)

    logging.info('start generating graphs')
    gr = []
    for event in filtered:
        start_str = DateTimeUtil.utc_timestamp_to_str(
            event['e_start']['timestamp'], '%d.%m. %H:%M:%S')
        end_str = DateTimeUtil.utc_timestamp_to_str(
            event['e_end']['timestamp'], '%H:%M:%S')
        title = '{0} - {1}'.format(start_str, end_str)

        curves = [
            Graph.db_to_simple_graph(event, 'co2_in_ppm', 'green', 'CO2', 50),
        ]
        if enable_regression:
            curves.append(Graph.db_to_simple_graph(
                event, 'co2_in_ppm_exp', 'red', 'SimpleExpRegression', 50))
            curves.append(Graph.db_to_simple_graph(
                event, 'co2_in_ppm_exp2', 'orange', 'ExpRegressionWithDelay', 50))

        gr.append({'title': title, 'graphs': curves})

    graphs.gen(gr, output_file + '.html', 0, 0)
    logging.info('end generating graphs')
    logging.info('end')
def row(self, column_name, time):
    """Return the measured value for *column_name* at *time*.

    On the first request for a column, the whole interval
    <self.start, self.end> is loaded with one bulk query and cached;
    later lookups hit the cache. Timestamps outside the cached range
    fall back to the parent selector.

    :raises ValueError: if the resolved value is None (missing sample)
    """
    cache = self.cache
    if column_name not in cache:
        # warm the cache with a single bulk select over the interval;
        # rows are assumed to be one sample per second from self.start
        column_cache = {}
        rows = Storage.select_interval(self.con, self.start, self.end,
                                       column_name, self.table_name,
                                       without_none_value=False)
        timestamp = self.start
        for item in rows:
            column_cache[timestamp] = None if item is None else float(item)
            timestamp += 1
        cache[column_name] = column_cache

    if time in cache[column_name]:
        value = cache[column_name][time]
    else:
        # outside the pre-loaded interval -> delegate to the parent
        value = super(CachedRowWithIntervalSelector, self).row(column_name, time)

    if value is None:
        readable = DateTimeUtil.utc_timestamp_to_str(time, '%Y/%m/%d %H:%M:%S')
        raise ValueError('empty value at %s' % readable)
    return value
def window_no_event_value(values: tuple, precision: int):
    """Build one 'nothing' (no window event) training record.

    :param values: row tuple with measured values at fixed positions
    :param precision: number of decimal places for rounding
    :return: OrderedDict mapping attribute name -> rounded value
    """
    outer_co2_ppm = 435  # fixed outdoor CO2 concentration [ppm]
    p = precision

    # differences between indoor and outdoor measurements
    diff_temp = values[4] - values[6]
    diff_rh = values[7] - values[13]
    diff_abs = values[9] - values[14]
    diff_spec = values[11] - values[15]

    record = OrderedDict()
    record['datetime'] = DateTimeUtil.utc_timestamp_to_str(values[0],
                                                           '%Y-%m-%d %H:%M:%S')
    record['event'] = 'nothing'
    record['co2_in_ppm'] = round(values[16], p)
    record['co2_out_ppm'] = round(outer_co2_ppm, p)
    record['temperature_in_celsius'] = round(values[4], p)
    record['temperature_out_celsius'] = round(values[6], p)
    record['humidity_in_relative_percent'] = round(values[7], p)
    record['humidity_in_absolute_g_m3'] = round(values[9], p)
    record['humidity_in_specific_g_kg'] = round(values[11], p)
    record['humidity_out_relative_percent'] = round(values[13], p)
    record['humidity_out_absolute_g_m3'] = round(values[14], p)
    record['humidity_out_specific_g_kg'] = round(values[15], p)
    # NOTE(review): values[4] is also used for temperature_in_celsius above;
    # the pressure probably lives at a different tuple index -- confirm
    # against the caller that builds the row tuple.
    record['pressure_in_hpa'] = round(values[4], p)
    record['temperature_celsius_difference'] = round(diff_temp, p)
    record['humidity_relative_percent_difference'] = round(diff_rh, p)
    record['humidity_absolute_g_m3_difference'] = round(diff_abs, p)
    record['humidity_specific_g_kg_difference'] = round(diff_spec, p)
    return record
def row(self, column_name, time):
    """Fetch one measured value straight from the database.

    :raises ValueError: if no row exists or the stored value is None
    """
    result = Storage.one_row(self.con, self.table_name, column_name, time)

    if result is None or result[0] is None:
        readable = DateTimeUtil.utc_timestamp_to_str(time, '%Y/%m/%d %H:%M:%S')
        raise ValueError('empty value at %s' % readable)

    return float(result[0])
def row(self, column_name, time):
    """Return a value at *time* smoothed by linear regression.

    A window of +/- self.half_window_size samples around *time* is read
    from the database, a line is fitted through it with
    scipy.stats.linregress and the value of that line at the window
    centre is returned. Results are cached per (column, time); a failed
    window is cached as a None sentinel so it is not re-queried.

    :raises ValueError: if any sample in the window is missing
    """
    if column_name not in self.cache:
        self.cache[column_name] = {}
    if time in self.cache[column_name]:
        # cache hit; may resolve to the None sentinel of a failed window
        value = self.cache[column_name][time]
    else:
        start = time - self.half_window_size
        end = time + self.half_window_size
        res = Storage.select_interval(self.con, start, end, column_name,
                                      self.table_name,
                                      without_none_value=False)
        error = False
        if res is None or None in res:
            error = True
        if error:
            # remember the failure so the same query is not repeated
            self.cache[column_name][time] = None
            t = DateTimeUtil.utc_timestamp_to_str(time, '%Y/%m/%d %H:%M:%S')
            raise ValueError('empty value at %s' % t)
        # fit a line through the window samples (x = offset in samples)
        x = []
        y = []
        for i in range(0, len(res)):
            x.append(i)
            y.append(res[i])
        slope, intercept, _, _, _ = stats.linregress(x, y)
        # value of the fitted line at the centre of the window
        value = intercept + slope * self.half_window_size
        self.cache[column_name][time] = value
    if value is None:
        # a previously cached failure resolves to None -> raise again
        t = DateTimeUtil.utc_timestamp_to_str(time, '%Y/%m/%d %H:%M:%S')
        raise ValueError('empty value at %s' % t)
    return value
def additional_training_set(con, table_name, no_event_records, func,
                            row_selector, interval_selector, print_each=10):
    """Supplementary generation of training data based on given time points.

    :param con: database connection
    :param table_name: table name
    :param no_event_records: list of pairs (local time string, event label)
    :param func: attribute-generating function
    :param row_selector: row selector passed to *func*
    :param interval_selector: interval selector passed to *func*
    :param print_each: progress is logged every *print_each* records
    :return: list of OrderedDict records
    """
    attrs = []
    total = len(no_event_records)

    for index, record in enumerate(no_event_records):
        if index % print_each == 0:
            logging.debug('{0}/{1} events'.format(index, total))

        if record[0] == '':
            logging.warning('empty row in additional sets')
            continue

        start = int(DateTimeUtil.local_time_str_to_utc(record[0]).timestamp())
        try:
            data1 = func(con, table_name, start, row_selector,
                         interval_selector)
            readable = DateTimeUtil.utc_timestamp_to_str(start,
                                                         '%Y/%m/%d %H:%M:%S')
            data1.insert(0, ('datetime', readable))
            data1.insert(1, ('event', record[1]))
            attrs.append(OrderedDict(data1))
        except Exception as e:
            # best-effort: log and skip records that fail to generate
            logging.error(str(e))
            continue

    return attrs
def value_filter(data):
    """Invalidate out-of-range relative-humidity readings.

    Any 'rh_in_percentage' value above 100 % is replaced by None and the
    occurrence is logged. The structure is modified in place.

    :param data: list of lists of measurement dicts
    :return: the same *data* object
    """
    key = 'rh_in_percentage'
    for block in data:
        for item in block:
            if key not in item or item[key] is None:
                continue
            value = float(item[key])
            if value > 100:
                t = DateTimeUtil.utc_timestamp_to_str(item['measured_time'])
                logging.error(
                    '{0}, {1}: value {2} is out of range, skipped'.format(
                        t, key, value))
                item[key] = None
    return data
def row(self, column_name, time):
    """Fetch one value, caching each successful (column, time) lookup.

    Misses (missing row or None value) are NOT cached and always raise.

    :raises ValueError: if no row exists or the stored value is None
    """
    per_column = self.cache.setdefault(column_name, {})

    if time in per_column:
        value = per_column[time]
    else:
        value = None
        res = Storage.one_row(self.con, self.table_name, column_name, time)
        if res is not None and res[0] is not None:
            value = float(res[0])
            per_column[time] = value

    if value is None:
        readable = DateTimeUtil.utc_timestamp_to_str(time, '%Y/%m/%d %H:%M:%S')
        raise ValueError('empty value at %s' % readable)
    return value
def insert_values(conn, table_name, values, maps, write_each, precision):
    """Insert every *write_each*-th value row into *table_name*.

    :param conn: database connection
    :param table_name: target table
    :param values: list of dicts with the measured values
    :param maps: collection of column names that carry a mapped value
    :param write_each: only rows whose index is divisible by this are written
    :param precision: number of decimal places for rounding
    """
    # hoisted out of the loop: the column list is invariant across rows
    # (it used to be re-fetched for every value)
    columns = DBUtil.measured_values_table_column_names()

    for i, value in enumerate(values):
        # skip rows that are not on the write grid before doing any work
        if i % write_each != 0:
            continue

        t = ()
        for column in columns:
            if column == PreProcessing.TIME_STRING_ATTR_NAME:
                # human-readable timestamp derived from the raw one
                t += (DateTimeUtil.utc_timestamp_to_str(
                    value[PreProcessing.TIME_ATTR_NAME]),)
                continue

            if column in maps and value[column] is not None:
                t += (round(value[column], precision),)
            else:
                t += (None,)

        DBUtil.insert_value(conn, t, False, table_name)
def window_event_value(measured: dict, value_index: int, timestamp: int,
                       precision: int):
    """Build one window-event training record from measured series.

    :param measured: dict of measured series, keyed by attribute name
    :param value_index: position within each series
    :param timestamp: UTC timestamp of the record
    :param precision: number of decimal places for rounding
    :return: OrderedDict mapping attribute name -> rounded value
    """
    outer_co2_ppm = 435  # fixed outdoor CO2 concentration [ppm]
    p = precision
    i = value_index

    def at(name):
        # value of the given measured series at the record position
        return measured[name][i]

    diff_temp = at('temperature_in_celsius') - at('temperature_out_celsius')
    diff_rh = at('rh_in_percentage') - at('rh_out_percentage')
    diff_abs = at('rh_in_absolute_g_m3') - at('rh_out_absolute_g_m3')
    diff_spec = at('rh_in_specific_g_kg') - at('rh_out_specific_g_kg')

    record = OrderedDict()
    record['datetime'] = DateTimeUtil.utc_timestamp_to_str(
        timestamp, '%Y-%m-%d %H:%M:%S')
    record['event'] = ValueUtil.detect_window_action(
        len(measured['co2_in_ppm']), value_index)
    record['co2_in_ppm'] = round(at('co2_in_ppm'), p)
    record['co2_out_ppm'] = round(outer_co2_ppm, p)
    record['temperature_in_celsius'] = round(at('temperature_in_celsius'), p)
    record['temperature_out_celsius'] = round(at('temperature_out_celsius'), p)
    record['humidity_in_relative_percent'] = round(at('rh_in_percentage'), p)
    record['humidity_in_absolute_g_m3'] = round(at('rh_in_absolute_g_m3'), p)
    record['humidity_in_specific_g_kg'] = round(at('rh_in_specific_g_kg'), p)
    record['humidity_out_relative_percent'] = round(at('rh_out_percentage'), p)
    record['humidity_out_absolute_g_m3'] = round(at('rh_out_absolute_g_m3'), p)
    record['humidity_out_specific_g_kg'] = round(at('rh_out_specific_g_kg'), p)
    record['pressure_in_hpa'] = round(at('pressure_in_hpa'), p)
    record['temperature_celsius_difference'] = round(diff_temp, p)
    record['humidity_relative_percent_difference'] = round(diff_rh, p)
    record['humidity_absolute_g_m3_difference'] = round(diff_abs, p)
    record['humidity_specific_g_kg_difference'] = round(diff_spec, p)
    return record
def db_to_simple_graph(event, column, color, label, number_output_records):
    """Down-sample one measured column of an event into graph points.

    :param event: event dict with 'measured', 'e_start' and 'start_shift'
    :param column: name of the measured column to plot
    :param color: curve colour
    :param label: curve label
    :param number_output_records: approximate number of points wanted;
        None means every sample is plotted
    :return: dict consumed by the graph generator
    """
    length = len(event['measured'][column])

    # choose a sampling step so roughly number_output_records points
    # remain; if step would be zero there is not enough data, so plot
    # all available samples with step one
    step = 1
    if number_output_records is not None:
        step = length // number_output_records
        if step == 0:
            step = 1
        if step > 1:
            step += 1

    base = event['e_start']['timestamp'] + event['start_shift']
    timestamps = []
    points = []
    for offset in range(length):
        if offset % step != 0:
            continue
        sample = event['measured'][column][offset]
        timestamps.append(
            DateTimeUtil.utc_timestamp_to_str(base + offset, '%H:%M:%S'))
        # missing samples are serialized as the string 'Null'
        points.append('Null' if sample is None else sample)

    return {
        'timestamps': timestamps,
        'values': points,
        'label_x': label,
        'color': color,
        'open_close': column == 'open_close',
    }
def training_data_without_opposite(con, table_name, events, func,
                                   row_selector, interval_selector):
    """Generate training records for events, without opposite samples.

    Events whose attribute generation fails are silently skipped
    (deliberately best-effort).

    :param con: database connection
    :param table_name: table name
    :param events: event list
    :param func: attribute-generating function
    :param row_selector: row selector passed to *func*
    :param interval_selector: interval selector passed to *func*
    :return: list of OrderedDict records
    """
    attrs = []
    for event in events:
        start = event['e_start']['timestamp']
        end = event['e_end']['timestamp']
        try:
            record = func(con, table_name, start, row_selector,
                          interval_selector, end)
            readable = DateTimeUtil.utc_timestamp_to_str(
                start, '%Y/%m/%d %H:%M:%S')
            record.insert(0, ('datetime', readable))
            attrs.append(OrderedDict(record))
        except Exception:
            # a broken event must not stop the whole generation run
            continue
    return attrs
# Polling loop: periodically read the CO2 sensor and send a notification
# when the configured limit is exceeded, rate-limited by DELAY_BETWEEN_SEND.
# NOTE(review): statement nesting reconstructed from a collapsed source
# line -- confirm against version control.
last_time_notification = None
try:
    while not loop_end:
        time.sleep(SLEEP_TIME)

        # fetch the current sensor reading from the gateway
        sensor_info = cl.sensors_info(GATEWAY_ID, DEVICE_ID)
        value = extract_value(sensor_info, SENSOR_ID)
        measured_time = value['at']
        measured_value = value['value']
        if measured_value is None:
            continue

        time_out_str = DateTimeUtil.utc_timestamp_to_str(measured_time, '%Y/%m/%d %H:%M:%S')
        logging.debug(time_out_str + ': ' + measured_value)

        # notification text; Slovak: "current concentration {0} ppm"
        text = 'Aktualna koncentracia {0} ppm'.format(int(float(measured_value)))

        # send at most one notification per DELAY_BETWEEN_SEND seconds
        if last_time_notification is None or last_time_notification + DELAY_BETWEEN_SEND < time.time():
            if float(measured_value) > CO2_LIMIT:
                # Slovak suffix: "(limit X ppm), please ventilate."
                text += ", (limit " + str(CO2_LIMIT) + " ppm), prosim vyvetrajte."
                last_time_notification = time.time()
                send_notification(text)
                logging.debug('send notification')

    # loop finished normally -> remove the runtime config file
    os.remove(CONFIG_FILE)
except KeyboardInterrupt:
def training_data(con, table_name, events, func, row_selector,
                  interval_selector, event_type, print_each=10):
    """Generation of training data.

    For every event two records are produced: one at the event time
    (labelled with *event_type*) and one shifted 'nothing' record.
    Events whose attribute generation fails are skipped.

    :param con: database connection
    :param table_name: table name
    :param events: event list
    :param func: attribute-generating function
    :param row_selector: row selector passed to *func*
    :param interval_selector: interval selector passed to *func*
    :param event_type: 'open' or 'close'
    :param print_each: progress is logged every *print_each* events
    :return: tuple (records, successfully processed events)
    """
    training_events = []
    attrs = []
    total = len(events)

    for index, event in enumerate(events):
        start = event['e_start']['timestamp']
        end = event['e_end']['timestamp']
        shift = event['no_event_time_shift']

        if index % print_each == 0:
            logging.debug('{0}/{1} events'.format(index, total))

        if event_type == 'open':
            event_time = start
            no_event_time = start + shift
        elif event_type == 'close':
            event_time = end
            no_event_time = end - shift
        else:
            raise ValueError('event type must be: open or close')

        try:
            data1 = func(con, table_name, event_time, row_selector,
                         interval_selector)
            data2 = func(con, table_name, no_event_time, row_selector,
                         interval_selector)

            readable = DateTimeUtil.utc_timestamp_to_str(
                event_time, '%Y/%m/%d %H:%M:%S')
            data1.insert(0, ('datetime', readable))
            data1.insert(1, ('event', event_type))
            attrs.append(OrderedDict(data1))

            readable_no = DateTimeUtil.utc_timestamp_to_str(
                no_event_time, '%Y/%m/%d %H:%M:%S')
            data2.insert(0, ('datetime', readable_no))
            data2.insert(1, ('event', 'nothing'))
            attrs.append(OrderedDict(data2))

            training_events.append(event)
        except Exception:
            # best-effort: a broken event must not stop the generation
            continue

    return attrs, training_events
def testing_data(con, table_name, start, end, write_each, func,
                 row_selector, interval_selector, event_type,
                 log_every_hour=3):
    """Generation of testing data.

    Walks every second in <start, end) and produces one record per
    written timestamp; open/close transitions are detected from the
    'open_close' column of the previous and current second.

    :param con: database connection
    :param table_name: table name
    :param start: interval from which testing data is generated
    :param end: interval to which testing data is generated
    :param write_each: 'nothing' rows are written only every write_each s
    :param func: attribute-generating function
    :param row_selector: row selector passed to *func*
    :param interval_selector: interval selector passed to *func*
    :param event_type: 'open' or 'close'
    :param log_every_hour: progress logging period in hours
    :return: list of OrderedDict records
    """
    attrs = []
    bad_open_type_events = []

    global DATA_CACHE

    # BUGFIX: act_row used to be reset to None *inside* the loop, which
    # made the "reuse previous act_row" branch unreachable and re-queried
    # the t-1 row on every iteration. Initialise it once, before the loop.
    act_row = None
    for t in range(start, end):
        if t % (log_every_hour * 3600) == 0:
            logging.debug(DateTimeUtil.utc_timestamp_to_str(t))

        if act_row is None:
            previous_row = Storage.one_row(con, table_name, 'open_close',
                                           t - 1)
        else:
            # reuse the row fetched for the previous timestamp
            previous_row = act_row
        act_row = Storage.one_row(con, table_name, 'open_close', t)

        if event_type not in ['open', 'close']:
            raise ValueError('event type must be: open or close')

        if previous_row is None or act_row is None:
            continue

        # detect a 0->1 (open) or 1->0 (close) transition
        open_state = 'nothing'
        if event_type == 'open' and previous_row[0] == 0 and act_row[0] == 1:
            open_state = event_type
        elif event_type == 'close' and previous_row[0] == 1 and act_row[0] == 0:
            open_state = event_type

        if open_state == 'nothing':
            # 'nothing' rows are down-sampled to the write grid
            if t % write_each != 0:
                continue

        try:
            DATA_CACHE = func(con, table_name, t, row_selector,
                              interval_selector)
        except Exception:
            # events whose attributes cannot be computed are collected
            # and appended later as invalid placeholder rows
            if open_state in ['open', 'close']:
                bad_open_type_events.append(t)
            continue

        time = DateTimeUtil.utc_timestamp_to_str(t, '%Y/%m/%d %H:%M:%S')
        DATA_CACHE.insert(0, ('datetime', time))
        DATA_CACHE.insert(1, ('event', open_state))
        DATA_CACHE.append(('valid', 'yes'))
        attrs.append(OrderedDict(DATA_CACHE))

    if DATA_CACHE is None:
        logging.warning('any {0} events can be skipped'.format(event_type))
    else:
        # emit an invalid placeholder record for each failed event time,
        # using the last successful record as a column template
        tmp = {}
        for item in DATA_CACHE:
            key = item[0]
            tmp[key] = None
        tmp['event'] = event_type
        tmp['valid'] = 'no'

        for timestamp in bad_open_type_events:
            tmp['datetime'] = DateTimeUtil.utc_timestamp_to_str(
                timestamp, '%Y/%m/%d %H:%M:%S')
            attrs.append(OrderedDict(tmp))

    return attrs
def gen_graphs(event: dict, number_output_records: int, attr_name: list,
               lin_reg_attr_name: list):
    """Generates graph based on data measured using sensor 1.

    :param event: basic information about event
    :param number_output_records: number of points that are required in graph
    :param attr_name: measured attribute names (specific hum, absolute hum, temp)
    :param lin_reg_attr_name: matching linear-regression attribute names
    :return: graph that can contain several graphs
    """
    n = number_output_records

    start = DateTimeUtil.utc_timestamp_to_str(event['e_start']['timestamp'],
                                              '%d.%m. %H:%M:%S')
    end = DateTimeUtil.utc_timestamp_to_str(event['e_end']['timestamp'],
                                            '%H:%M:%S')
    interval = '{0} - {1}'.format(start, end)

    # (title prefix, measured attr, outdoor attr, regression attr,
    #  in label, out label, regression label)
    specs = [
        ('Specific hum in and out ', attr_name[0], 'rh_out_specific_g_kg',
         lin_reg_attr_name[0], 'sh in', 'sh out', 'sh lin reg'),
        ('Absolute hum in and out ', attr_name[1], 'rh_out_absolute_g_m3',
         lin_reg_attr_name[1], 'ah in', 'ah out', 'ah lin reg'),
        ('Temperature in and out ', attr_name[2], 'temperature_out_celsius',
         lin_reg_attr_name[2], 'temp in', 'temp out', 'temp lin reg'),
    ]

    graphs = []
    for title, attr_in, attr_out, attr_reg, lbl_in, lbl_out, lbl_reg in specs:
        graphs.append({
            'title': title + interval,
            'group': 'one',
            'graphs': [
                Graph.db_to_simple_graph(event, attr_in, 'blue', lbl_in, n),
                Graph.db_to_simple_graph(event, attr_out, 'red', lbl_out, n),
                Graph.db_to_simple_graph(event, attr_reg, 'orange', lbl_reg, n),
            ]
        })
    return graphs
def prepare_derivation(con, events: list, intervals_before: list,
                       intervals_after: list, table_name: str,
                       precision: int, derivation_attr_name: str,
                       derivation_index: int):
    """Compute derivations around event and no-event time points.

    For every event four lists are appended to event['derivation']:
    'before'/'after' (around the window opening) and
    'no_event_before'/'no_event_after' (around the shifted no-event
    point). A missing sample yields None; a missing no-event sample
    additionally marks the event invalid.

    :param con: database connection
    :param events: event list, modified in place
    :param intervals_before: time offsets into the past [s]
    :param intervals_after: time offsets into the future [s]
    :param table_name: table name
    :param precision: number of decimal places for rounding
    :param derivation_attr_name: measured attribute to derive
    :param derivation_index: index into event['no_event_values']
    :return: the same events list
    """
    for event in events:
        no_event_shift = event['no_event_time_shift']
        start = event['e_start']['timestamp']
        open_value = event['measured'][derivation_attr_name][0]
        no_event_open_value = event['no_event_values'][derivation_index]

        if no_event_open_value is None:
            readable = DateTimeUtil.utc_timestamp_to_str(
                start + no_event_shift, '%Y-%m-%d %H:%M:%S')
            logging.warning('no_event value is None: %s' % readable)

        # derivations before the window was opened: between the opening
        # value and a value the given interval in the past
        for interval in intervals_before:
            sample = Storage.one_row(con, table_name, derivation_attr_name,
                                     start - interval)
            derivation = None
            if sample is not None and sample[0] is not None:
                derivation = round((open_value - float(sample[0])) / interval,
                                   precision)
            event['derivation']['before'].append(derivation)

        # derivations after the window was opened: between the opening
        # value and a value the given interval in the future
        for interval in intervals_after:
            sample = Storage.one_row(con, table_name, derivation_attr_name,
                                     start + interval)
            derivation = None
            if sample is not None and sample[0] is not None:
                derivation = round((float(sample[0]) - open_value) / interval,
                                   precision)
            event['derivation']['after'].append(derivation)

        # derivations before the no-event point (shifted by
        # no_event_shift); a missing sample invalidates the event
        for interval in intervals_before:
            sample = Storage.one_row(con, table_name, derivation_attr_name,
                                     start + no_event_shift - interval)
            derivation = None
            if sample is not None and sample[0] is not None \
                    and no_event_open_value is not None:
                derivation = round(
                    (float(no_event_open_value) - float(sample[0])) / interval,
                    precision)
            else:
                event['valid_event'] = False
            event['derivation']['no_event_before'].append(derivation)

        # derivations after the no-event point; a missing sample
        # invalidates the event
        for interval in intervals_after:
            sample = Storage.one_row(con, table_name, derivation_attr_name,
                                     start + no_event_shift + interval)
            derivation = None
            if sample is not None and sample[0] is not None \
                    and no_event_open_value is not None:
                derivation = round(
                    (float(sample[0]) - float(no_event_open_value)) / interval,
                    precision)
            else:
                event['valid_event'] = False
            event['derivation']['no_event_after'].append(derivation)

        event['derivation']['intervals_before'] = intervals_before
        event['derivation']['intervals_after'] = intervals_after
        event['derivation']['intervals_no_event_before'] = intervals_before
        event['derivation']['intervals_no_event_after'] = intervals_after

    return events
def generate_file(con, start_shift, end_shift, output_file):
    """Generate an HTML file with humidity/temperature graphs per event.

    :param con: database connection
    :param start_shift: shift applied to each event start
    :param end_shift: shift applied to each event end
    :param output_file: name of the generated file (without extension)
    """
    logging.info('start: ' + output_file)
    graphs = Graph("./../../src/graph")

    # download data
    storage = Storage('examples/events_klarka_shower.json', 0,
                      'measured_klarka_shower')
    d = storage.load_data(con, start_shift, end_shift,
                          'temperature_in_celsius')
    logging.info('downloaded events: %d' % len(d))

    # apply filters to events
    filtered = FilterUtil.only_valid_events(d)

    # for travis
    if ConnectionUtil.is_testable_system():
        filtered = filtered[:ConnectionUtil.MAX_TESTABLE_EVENTS]

    logging.info('events after applying the filter: %d' % len(filtered))

    fill_start_end(filtered)

    logging.info('start generating graphs')

    # (group name, measured column, colour, label); group names are kept
    # exactly as the graph generator expects them
    groups = [
        ('one', 'temperature_in_celsius', 'blue', 'Temperature'),
        ('two', 'rh_in_percentage', 'red', 'Relative humidity [%]'),
        ('tree', 'rh_in_absolute_g_m3', 'green', 'Absolute humidity [g/m3]'),
        ('four', 'rh_in_specific_g_kg', 'purple', 'Specific humidity [g/kg]'),
    ]

    gr = []
    for event in filtered:
        start_str = DateTimeUtil.utc_timestamp_to_str(
            event['e_start']['timestamp'], '%d.%m. %H:%M:%S')
        end_str = DateTimeUtil.utc_timestamp_to_str(
            event['e_end']['timestamp'], '%H:%M:%S')
        title = '{0} - {1}'.format(start_str, end_str)

        for group, column, color, label in groups:
            gr.append({
                'title': title,
                'group': group,
                'graphs': [
                    Graph.db_to_simple_graph(event, column, color, label, 75),
                    Graph.db_to_simple_graph(event, 'open_close', 'orange',
                                             'Open', 75),
                ]
            })

    graphs.gen(gr, output_file + '.html', 0, 0, global_range=True)
    logging.info('end generating graphs')
def with_delay(self, before, after):
    """Evaluate predictions while tolerating a time delay around events.

    Each true event of self.__event_type spawns a tolerance interval
    <t-before, t+after>; predictions falling inside an interval are
    pooled and the interval counts as a hit if any pooled prediction
    matches the event type. Rows outside all intervals are scored one
    by one into a confusion matrix.

    :param before: tolerated prediction lead time [s]
    :param after: tolerated prediction lag time [s]
    :return: (formatted table, wrongly predicted rows, result dict)
    """
    # confusion-matrix counters
    nothing_as_true_nothing = 0
    open_as_true_nothing = 0
    open_as_true_open = 0
    nothing_as_true_open = 0
    extended = {}   # event time -> predictions pooled from its interval
    invalid = {}    # event time -> 'valid' flags pooled from its interval
    wrong_prediction = []

    # lazily load the evaluated data on first use
    if self.__data == []:
        self.__read()

    # build a tolerance interval around every true event
    intervals = []
    for row in self.__data:
        if row['event'] == self.__event_type:
            t = row['datetime']
            intervals.append((t - before, t, t + after))
    intervals.sort()

    # report overlapping intervals; message is Slovak for
    # "overlapping intervals {0} and {1}"
    for i in range(1, len(intervals)):
        if intervals[i - 1][2] > intervals[i][0]:
            t1 = DateTimeUtil.utc_timestamp_to_str(intervals[i - 1][1],
                                                   '%d.%m. %H:%M:%S')
            t2 = DateTimeUtil.utc_timestamp_to_str(intervals[i][1],
                                                   '%d.%m. %H:%M:%S')
            print('prekryvajuce sa intervaly {0} a {1}'.format(t1, t2))

    for row in intervals:
        extended[row[1]] = []
        invalid[row[1]] = []

    for row in self.__data:
        # pool rows that fall into any tolerance interval
        found = False
        for interval in intervals:
            if interval[0] < row['datetime'] < interval[2]:
                extended[interval[1]].append(row['prediction'])
                invalid[interval[1]].append(row['valid'])
                found = True
        if found or row['valid'] == 'no':
            continue

        # rows outside all intervals are scored directly
        if row['event'] == row['prediction']:
            if row['event'] == self.__event_type:
                open_as_true_open += 1
            elif row['event'] == 'nothing':
                nothing_as_true_nothing += 1
            else:
                raise ValueError('error')
        else:
            # NOTE(review): the nesting below was reconstructed from a
            # collapsed source line -- confirm against version control.
            if row['event'] == 'nothing' and row['prediction'] == self.__event_type:
                open_as_true_nothing += 1
                if row['event'] != self.__event_type:
                    wrong_prediction.append(row['readable'])
            elif row['prediction'] != '':
                nothing_as_true_open += 1

    # score the pooled tolerance intervals
    for key, interval in extended.items():
        # a single invalid pooled row is ignored entirely
        if len(interval) == 1 and 'no' in invalid[key]:
            continue
        found = False
        for k in range(0, len(interval)):
            row = interval[k]
            if row == self.__event_type:
                found = True
                break
        if found:
            if 'no' not in invalid[key]:
                open_as_true_open += 1
        else:
            if 'no' not in invalid[key]:
                nothing_as_true_open += 1
        # remaining pooled rows count as correctly predicted 'nothing'
        nothing_as_true_nothing += len(interval) - 1

    res = {
        'records': self.count,
        'accuracy': round(((nothing_as_true_nothing + open_as_true_open)
                           / self.count) * 100, 2),
        'nothing_as_true_nothing': nothing_as_true_nothing,
        'open_as_true_nothing': open_as_true_nothing,
        'open_as_true_open': open_as_true_open,
        'nothing_as_true_open': nothing_as_true_open,
    }
    res['sum'] = res['nothing_as_true_nothing'] + res['open_as_true_nothing'] \
        + res['open_as_true_open'] + res['nothing_as_true_open']

    return self.__simple_table(res), wrong_prediction, res
def generate_graphs_sensor_1(event: dict, owner: str,
                             number_output_records: int):
    """Generates graph based on data measured using sensor 1.

    :param event: basic information about event
    :param owner: sensor owner(klarka|peto), name must be the same as in database
    :param number_output_records: number of points that are required in graph
    :return: graph that can contain several graphs
    """
    n = number_output_records

    start = DateTimeUtil.utc_timestamp_to_str(event['e_start']['timestamp'],
                                              '%d.%m. %H:%M:%S')
    end = DateTimeUtil.utc_timestamp_to_str(event['e_end']['timestamp'],
                                            '%H:%M:%S')
    interval = '{0} - {1}'.format(start, end)

    graphs = [
        {
            # the first graph also carries the event statistics
            'title': 'Temp in and out ' + interval,
            'stat': generate_info(event, owner),
            'graphs': [
                Graph.db_to_simple_graph(event, 'temperature_in_celsius',
                                         'DarkRed', 'temp in', n),
                Graph.db_to_simple_graph(event, 'temperature_out_celsius',
                                         'LightCoral', 'temp out', n)
            ]
        },
        {
            'title': 'Relative hum in and out ' + interval,
            'graphs': [
                Graph.db_to_simple_graph(event, 'rh_in_percentage', 'blue',
                                         'hum in', n),
                Graph.db_to_simple_graph(event, 'rh_out_percentage', 'red',
                                         'hum out', n),
            ]
        },
        {
            'title': 'Specific hum in and out ' + interval,
            'graphs': [
                Graph.db_to_simple_graph(event, 'rh_in_specific_g_kg', 'blue',
                                         'hum in', n),
                Graph.db_to_simple_graph(event, 'rh_out_specific_g_kg', 'red',
                                         'hum out', n),
            ]
        },
        {
            'title': 'Absolute hum in and out ' + interval,
            'graphs': [
                Graph.db_to_simple_graph(event, 'rh_in_absolute_g_m3', 'blue',
                                         'hum in', n),
                Graph.db_to_simple_graph(event, 'rh_out_absolute_g_m3', 'red',
                                         'hum out', n),
            ]
        },
    ]
    return graphs
def prepare_derivation_lin_reg(con, events: list, intervals_before: list,
                               intervals_after: list, table_name: str,
                               precision: int, derivation_attr_name: str,
                               lin_window_size: tuple):
    """Compute derivations like prepare_derivation(), but every sample is
    first smoothed by a linear regression over a window of
    lin_window_size = (seconds before, seconds after) around its time.

    A missing window marks the event invalid and stops processing of the
    affected derivation list; a missing smoothed value appends None.

    :param con: database connection
    :param events: event list, modified in place
    :param intervals_before: time offsets into the past [s]
    :param intervals_after: time offsets into the future [s]
    :param table_name: table name
    :param precision: number of decimal places for rounding
    :param derivation_attr_name: measured attribute to derive
    :param lin_window_size: (seconds before, seconds after) window bounds
    :return: the same events list (added for consistency with
        prepare_derivation; the function previously returned None)
    """
    for event in events:
        no_event_shift = event['no_event_time_shift']
        start = event['e_start']['timestamp']

        values1 = Storage.select_interval(con, start - lin_window_size[0],
                                          start + lin_window_size[1],
                                          derivation_attr_name, table_name)
        values2 = Storage.select_interval(
            con, start + no_event_shift - lin_window_size[0],
            start + no_event_shift + lin_window_size[1],
            derivation_attr_name, table_name)

        if not values2 or not values1:
            event['valid_event'] = False
            continue

        open_value = lin_reg(values1, lin_window_size[0] - 1)
        no_event_open_value = lin_reg(values2, lin_window_size[0] - 1)

        if no_event_open_value is None:
            readable = DateTimeUtil.utc_timestamp_to_str(
                start + no_event_shift, '%Y-%m-%d %H:%M:%S')
            logging.warning('no_event value is None: %s' % readable)

        # derivations before the window was opened
        for interval in intervals_before:
            value_time = start - interval
            v = Storage.select_interval(con, value_time - lin_window_size[0],
                                        value_time + lin_window_size[1],
                                        derivation_attr_name, table_name)
            if not v:
                event['valid_event'] = False
                break
            value = lin_reg(v, lin_window_size[0] - 1)
            derivation = None
            # BUGFIX: the condition used to test `value is not None` twice;
            # the second test must guard open_value, otherwise a None
            # regression result raised a TypeError in the subtraction.
            if value is not None and open_value is not None:
                derivation = round((open_value - value) / interval, precision)
            event['derivation']['before'].append(derivation)

        # derivations after the window was opened
        for interval in intervals_after:
            value_time = start + interval
            v = Storage.select_interval(con, value_time - lin_window_size[0],
                                        value_time + lin_window_size[1],
                                        derivation_attr_name, table_name)
            if not v:
                event['valid_event'] = False
                break
            value = lin_reg(v, lin_window_size[0] - 1)
            derivation = None
            if value is not None and open_value is not None:
                derivation = round((value - open_value) / interval, precision)
            event['derivation']['after'].append(derivation)

        # derivations before the no-event point (shifted by no_event_shift);
        # a missing smoothed value also invalidates the event
        for interval in intervals_before:
            value_time = start + no_event_shift - interval
            v = Storage.select_interval(con, value_time - lin_window_size[0],
                                        value_time + lin_window_size[1],
                                        derivation_attr_name, table_name)
            if not v:
                event['valid_event'] = False
                break
            value = lin_reg(v, lin_window_size[0] - 1)
            derivation = None
            if value is not None and no_event_open_value is not None:
                derivation = round(
                    (no_event_open_value - value) / interval, precision)
            else:
                event['valid_event'] = False
            event['derivation']['no_event_before'].append(derivation)

        # derivations after the no-event point
        for interval in intervals_after:
            value_time = start + no_event_shift + interval
            v = Storage.select_interval(con, value_time - lin_window_size[0],
                                        value_time + lin_window_size[1],
                                        derivation_attr_name, table_name)
            if not v:
                event['valid_event'] = False
                break
            value = lin_reg(v, lin_window_size[0] - 1)
            derivation = None
            if value is not None and no_event_open_value is not None:
                derivation = round(
                    (value - no_event_open_value) / interval, precision)
            else:
                event['valid_event'] = False
            event['derivation']['no_event_after'].append(derivation)

        event['derivation']['intervals_before'] = intervals_before
        event['derivation']['intervals_after'] = intervals_after
        event['derivation']['intervals_no_event_before'] = intervals_before
        event['derivation']['intervals_no_event_after'] = intervals_after

    return events