Example 1
def _web_post_changes_enerpi_encryption_key(dict_web_form, lines_keyfile, dict_key_file, dest_filepath=None):
    """
    Process changes in web editor of ENERPI BROADCAST ENCRYPTION KEY
    # Secret Key editing!! NOT SAFE AT ALL!! (Debugging purposes)

    :param dict_web_form: :OrderedDict: Posted Form with new crypto key
    :param lines_keyfile: :list: original file text lines (1) with the original crypto key
    :param dict_key_file: :OrderedDict: original config dict of dicts
                            (like the one 'web_edit_enerpi_encryption_key' returns)
    :param dest_filepath: :str: (optional) destination filepath for save configuration changes
    :return: :tuple: (:dict: alert message, :list: text lines, :OrderedDict: updated dict_config)

    """
    t, sub = TITLE_EDIT_CRYPTOKEY, SUBTITLE_EDIT_CRYPTOKEY
    ant_key = dict_key_file[t][sub][0]
    new_key = dict_web_form[sub]
    alerta = {}
    if new_key != ant_key:
        try:  # Validate new Fernet Key
            _ = get_codec(new_key.encode())
            if len(new_key) > 10:
                if dest_filepath is not None:
                    log('The new KEY ("{}")\nwill be saved in "{}"\n'.format(new_key, dest_filepath), 'warning', False)
                    with open(dest_filepath, 'wb') as f:
                        f.write(new_key.encode())
                str_cambios = ('Configuration changes in encryption key:<br>{}<br> New config SAVED!'
                               .format('- <strong>"{}"</strong> (before={}) -> <strong>"{}"</strong>'
                                       .format(SUBTITLE_EDIT_CRYPTOKEY, ant_key, new_key)))
                log(str_cambios, 'debug', False)
                alerta = {'alert_type': 'warning', 'texto_alerta': str_cambios}
                lines_keyfile = [new_key]
                dict_key_file = OrderedDict([(t, OrderedDict([(sub, (new_key, 'text', None))]))])
        except AssertionError as e:
            alerta = {'alert_type': 'danger', 'texto_alerta': 'Not a valid KEY: {}. New KEY was: {}'.format(e, new_key)}
    return alerta, lines_keyfile, dict_key_file
Example 2
def showfile(file='flask'):
    """
    Text-file viewer page, with ascending/descending order and/or # of last lines (file 'tail')

    :param str file: file_id to show

    """
    ok, is_logfile, filename = _get_filepath_from_file_id(file)
    if ok:
        delete = request.args.get('delete', '')
        reverse = request.args.get('reverse', False)
        tail_lines = request.args.get('tail', None)
        alerta = request.args.get('alerta', '')
        if alerta:
            alerta = json.loads(alerta)
        if not alerta and delete and is_logfile:
            with open(filename, 'w'):
                pass  # opening in 'w' mode truncates the logfile
            cad_delete = 'LOGFILE {} DELETED'.format(filename.upper())
            log(cad_delete, 'warn', False)
            return redirect(url_for('showfile', file=file,
                                    alerta=json.dumps({'alert_type': 'warning', 'texto_alerta': cad_delete})))
        data = get_lines_file(filename, tail=tail_lines, reverse=reverse)
        return render_template('text_file.html', titulo='LOG File:' if is_logfile else 'CONFIG File:', file_id=file,
                               subtitulo='<strong>{}</strong>'.format(filename), is_logfile=is_logfile,
                               file_content=data, filename=os.path.basename(filename), alerta=alerta)
    return abort(404)
Example 3
def get_catalog_paths(start=None, end=None, last_hours=None, min_ts=None):
    """
    Return list of relative paths to hdf stores for a time slice.

    :param start: str or datetime like object for start
    :param end: str or datetime like object for end
    :param last_hours: str or int for slice time from now - 'last_hours' to now
    :param min_ts: optional absolute datetime minimum (datetime like object)
    :return: list of relpaths

    """
    t0, tf = _get_time_slice(start, end, last_hours, min_ts=min_ts)
    ahora = dt.datetime.now()
    try:
        t0 = pd.Timestamp(t0).to_pydatetime()
        tf = pd.Timestamp(tf).to_pydatetime() if tf else ahora
    except ValueError as e:
        log('ValueError "{}" in _get_paths with ts_ini={} & ts_fin={}'.format(e, t0, tf), 'error')
        return []
    periods = (tf.year * 12 + tf.month) - (t0.year * 12 + t0.month)
    index = pd.DatetimeIndex(freq='M', start=t0.date(), periods=periods + 1)
    paths = []
    for i in index:
        if (ahora.year == i.year) and (ahora.month == i.month):
            init = ahora.replace(day=1).date() if len(paths) > 0 else t0.date()
            index_d = pd.DatetimeIndex(freq='D', start=init, periods=tf.day - init.day + 1)
            for day in index_d:  # plain loop; avoids shadowing the outer loop variable 'i'
                paths.append(_make_index_path(day, w_day=True))
        else:
            paths.append(_make_index_path(i, w_day=False))
    return paths
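
Note: the pd.DatetimeIndex(freq=..., start=..., periods=...) constructor used above was deprecated in pandas 0.24 and later removed; on current pandas the same monthly index is built with pd.date_range (month-end frequency 'M' is spelled 'ME' from pandas 2.2 on). A minimal sketch with illustrative dates:

import pandas as pd

# Month-end index covering a slice, equivalent to the removed
# pd.DatetimeIndex(freq='M', start=..., periods=...) call:
index = pd.date_range(start='2017-01-15', periods=4, freq='M')
# -> DatetimeIndex(['2017-01-31', '2017-02-28', '2017-03-31', '2017-04-30'], ...)
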
Example 4
 def reprocess_all_data(self):
     """
     Load & reprocess all data in catalog.
     Useful when changing summary data construction (for version changes)
     :return: ok operation
     :rtype: bool
     """
     if self.tree is not None:
         paths_w_summary = self.tree[(self.tree.key == self.key_summary) & self.tree.is_cat]
         for path in paths_w_summary.st:
             df = self.process_data(self._load_store(path))
             if (df is not None) and not df.empty:
                 # Build the summary only when there is data to summarize
                 df_s = self.process_data_summary(df)
                 self._save_hdf([df, df_s], path, [self.key_raw, self.key_summary], mode='w', **KWARGS_SAVE)
         path_today = self.tree[self.tree.st.str.contains(ST_TODAY) & self.tree.is_cat]
         if not path_today.empty:
             path = path_today.st.iloc[0]
             df = self.process_data(self._load_store(path))
             if (df is not None) and not df.empty:
                 self._save_hdf(df, path, self.key_raw, mode='w', **KWARGS_SAVE)
         self.tree = self._get_index(check_index=True)
         return True
     else:
         log('No data to reprocess!', 'error', self.verbose)
         return False
Example 5
def write_fig_to_svg(fig, name_img, preserve_ratio=False):
    """
    Write matplotlib figure to disk in SVG format.

    :param matplotlib.figure fig: figure to export
    :param str name_img: desired image path
    :param bool preserve_ratio: preserve size ratio on the SVG file (default: False)
    :return: operation ok
    :rtype: bool

    """
    canvas = FigureCanvas(fig)
    output = BytesIO()
    imgformat = 'svg'
    canvas.print_figure(output, format=imgformat, transparent=True)
    svg_out = output.getvalue()
    if not preserve_ratio:
        svg_out = REGEXPR_SVG_WIDTH.sub(' width="100%" preserveAspectRatio="none"',
                                        REGEXPR_SVG_HEIGHT.sub('<svg height="100%"', svg_out.decode())).encode()
    try:
        with open(name_img, 'wb') as f:
            f.write(svg_out)
    except OSError as e:
        log('OSError writing SVG on disk: {} [{}]'.format(e, e.__class__), 'error', True)
        return False
    return True
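
A minimal usage sketch, assuming write_fig_to_svg is importable from this plotting module (the figure and output path are illustrative):

import matplotlib
matplotlib.use('Agg')  # headless backend: render without a display
import matplotlib.pyplot as plt

fig, ax = plt.subplots(figsize=(4, 2))
ax.plot([0, 1, 2], [10, 20, 15])
ok = write_fig_to_svg(fig, '/tmp/enerpi_tile.svg', preserve_ratio=True)
print('SVG written:', ok)
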
Example 6
def _web_edit_enerpi_sensors_json(lines_json):
    """
    * Get JSON sensors config for web user editing:

    Make Ordered dict like:
    ==> [(section_name,
            OrderedDict([(VARIABLE, (VALUE, 'int|float|bool|text', comment)),
                         (VARIABLE, (VALUE, 'int|float|bool|text', comment)),
                         ...
                         (VARIABLE, (VALUE, 'int|float|bool|text', comment))])),
         (section_name,
            OrderedDict([(VARIABLE, (VALUE, 'int|float|bool|text', comment)),
                         (VARIABLE, (VALUE, 'int|float|bool|text', comment)),
                         ...
                         (VARIABLE, (VALUE, 'int|float|bool|text', comment))])),
         ...]

    :param lines_json: Lines of JSON file
    :return: :tuple of (ok, dict_file_for_webform):

    """
    # TODO JSON EDITOR
    try:
        sensors_json = json.loads('\n'.join(lines_json), encoding=ENCODING)
        t, sub = TITLE_EDIT_JS_SENSORS, SUBTITLE_EDIT_JS_SENSORS
        d_conf = OrderedDict([(t, OrderedDict([(sub, (json.dumps(sensors_json, indent=1), 'text', None))]))])
        return True, d_conf
    except json.decoder.JSONDecodeError as e:
        msg = 'JSONDecodeError reading new SENSORS JSON: {} --> {}'.format(e, lines_json)
        log(msg, 'error')
        return False, {'error': msg}
Example 7
 def _is_changed(value, params, name):
     if (value is None) or ((len(value) == 0) and
                            (name not in OPTIONAL_PARAMS)):
         msg = 'INI Value ERROR: key={}, value="{}", (type={})'.format(
             name, value, params[1])
         return False, value, msg
     try:
         if params[1] == 'int':
             try:
                 value = int(value)
             except ValueError:
                 value = float(value)
         elif params[1] == 'float':
             value = float(value)
         elif params[1] == 'bool':
             value = (value.lower() == 'true') or (value.lower() == 'on')
         if value != params[0]:
             log('"{}" -> CHANGED FROM {} to {} (type={})'.format(
                 name, params[0], value, params[1]), 'debug', False)
             return True, value, None
         return False, value, None
     except ValueError as e:
         msg = 'ValueError reading INI: key={}, value={}, (type={}). Error: {}'.format(
             name, value, params[1], e)
         return False, value, msg
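
A hedged usage sketch: params is the original (VALUE, 'int|float|bool|text', comment) tuple from the parsed INI dict, and the posted value always arrives as a string (the names and values here are made up; log and OPTIONAL_PARAMS are assumed module globals):

changed, value, err = _is_changed('57776', (57775, 'int', None), 'UDP_PORT')
# -> changed=True, value=57776 (cast to int), err=None
changed, value, err = _is_changed('on', (False, 'bool', None), 'WITH_RGBLED')
# -> changed=True, value=True, err=None
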
Example 8
def editfile(file='config'):
    """
    Configuration editor for the INI file, the JSON sensors file & the encryption key

    :param str file: file_id to edit

    """
    ok, is_logfile, filename = _get_filepath_from_file_id(file)
    if ok and (file in ENERPI_CONFIG_FILES):
        alerta = request.args.get('alerta', '')
        if alerta:
            alerta = json.loads(alerta)
        extra_links = [(ENERPI_CONFIG_FILES[f_id]['text_button'], url_for('editfile', file=f_id))
                       for f_id in sorted(ENERPI_CONFIG_FILES.keys(), key=lambda x: ENERPI_CONFIG_FILES[x]['order'])]
        show_switch_comments = ENERPI_CONFIG_FILES[file]['show_switch_comments']
        if not show_switch_comments:
            without_comments = True
        else:
            without_comments = request.args.get('without_comments', 'False').lower() == 'true'
        d_edit = request.form if request.method == 'POST' else None
        alerta_edit, lines_config, config_data = web_config_edition_data(file, filename, d_edition_form=d_edit)
        if not alerta:
            alerta = alerta_edit
        elif alerta_edit is not None:
            alerta.update(alerta_edit)
        return render_template('edit_text_file.html', titulo='CONFIG EDITOR', file_id=file,
                               show_switch_comments=show_switch_comments, with_comments=not without_comments,
                               abspath=filename, dict_config_content=config_data, file_lines=lines_config,
                               filename=os.path.basename(filename), alerta=alerta, extra_links=extra_links)
    log('Error in editfile with file={}'.format(file), 'error', False)
    return abort(404)
Example 9
def _get_filepath_from_file_id(file_id):
    # Interesting files / logs to show/edit/download/upload:
    if file_id in ENERPI_CONFIG_FILES:
        return True, False, os.path.join(DATA_PATH, ENERPI_CONFIG_FILES[file_id]['filename'])
    elif 'flask' == file_id:
        is_logfile, filename = True, SERVER_FILE_LOGGING
    elif 'rsc' == file_id:
        is_logfile, filename = True, SERVER_FILE_LOGGING_RSCGEN
    elif 'nginx_err' == file_id:
        is_logfile, filename = True, '/var/log/nginx/error.log'
    elif 'nginx' == file_id:
        is_logfile, filename = True, '/var/log/nginx/access.log'
    elif 'enerpi' == file_id:
        is_logfile, filename = True, FILE_LOGGING
    elif 'uwsgi' == file_id:
        is_logfile, filename = True, '/var/log/uwsgi/{}.log'.format(os.path.splitext(UWSGI_CONFIG_FILE)[0])
    elif 'daemon_out' == file_id:
        is_logfile, filename = True, DAEMON_STDOUT
    elif 'daemon_err' == file_id:
        is_logfile, filename = True, DAEMON_STDERR
    else:  # File derived from the catalog
        cat = enerpi_data_catalog(check_integrity=False)
        if 'raw_store' == file_id:
            is_logfile, filename = False, os.path.join(cat.base_path, cat.raw_store)
        elif 'catalog' == file_id:
            is_logfile, filename = False, os.path.join(cat.base_path, cat.catalog_file)
        else:
            log('Unrecognized FILE_ID: {}'.format(file_id), 'error', False)
            is_logfile, filename = False, SERVER_FILE_LOGGING
            return False, is_logfile, filename
    return True, is_logfile, filename
Example 10
 def _get_broadcast_socket(verb=False):
     sock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM, socket.IPPROTO_UDP)
     # sock.setsockopt(socket.IPPROTO_IP, socket.IP_MULTICAST_TTL, 32)
     sock.setsockopt(socket.SOL_SOCKET, socket.SO_BROADCAST, 1)
     sock.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
     log(_DESCRIPTION_IO.format(_UDP_IP, port), 'ok', verb, verb)
     return sock
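
For reference, the same send pattern as a self-contained stdlib sketch (the payload and address are illustrative; 57775 matches the fallback UDP_PORT used in broadcast_msg below):

import socket

sock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM, socket.IPPROTO_UDP)
sock.setsockopt(socket.SOL_SOCKET, socket.SO_BROADCAST, 1)  # allow broadcast datagrams
sock.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)  # allow quick rebinding
sock.sendto(b'hello enerpi', ('255.255.255.255', 57775))
sock.close()
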
Example 11
def _compress_data(data, sensors):
    """
    Down-casting of raw data to minimize disk usage.

    RMS sensor data is reduced to float32
    MEAN sensor data take integer values from 0 to 1000 (int16)
    Ref counters of rms & mean sensors also take integer values (int16)
    :param data: :pd.DataFrame: raw data
    :param sensors: sensor configuration (defines the RMS/MEAN columns & ref counters)
    :return: :pd.DataFrame: compressed raw data
    """
    if (data is not None) and not data.empty:
        c1, c2, c3 = sensors.columns_sensors_rms, sensors.columns_sensors_mean, [sensors.ref_rms, sensors.ref_mean]
        try:
            data_c = data[c1 + c2 + c3].copy()
        except KeyError as e:
            c1, c2, c3 = [list(filter(lambda x: x in data, c)) for c in [c1, c2, c3]]
            log('KeyError in _compress_data: {}; existent columns: {}'.format(e, [c1, c2, c3]), 'error', False)
            data_c = data[c1 + c2 + c3].copy()
        for c in c1:
            data_c[c] = data_c[c].astype('float32')
        for c in c2:
            if data_c[c].dtype != 'int16':
                data_c[c] = pd.Series(1000. * data_c[c]).round().astype('int16')
        for c in c3:
            data_c[c] = data_c[c].astype('int16')
        return data_c
    return data
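
The disk saving comes entirely from the narrower dtypes; a quick self-contained check of the down-casting (column names are made up):

import numpy as np
import pandas as pd

df = pd.DataFrame({'power_rms': np.random.rand(1000) * 230.,
                   'ldr_mean': np.random.rand(1000)})
before = df.memory_usage(deep=True).sum()
df['power_rms'] = df['power_rms'].astype('float32')                 # 8 -> 4 bytes/row
df['ldr_mean'] = (1000. * df['ldr_mean']).round().astype('int16')   # 8 -> 2 bytes/row
print('{} -> {} bytes'.format(before, df.memory_usage(deep=True).sum()))
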
Example 12
def enerpi_receiver_generator(verbose=False, n_msgs=None):
    """
    Generator of broadcasted values by ENERPI Logger.

    It can be used by any machine in the same network as the ENERPI Logger. It decrypts the encrypted broadcast and
    returns a dict of vars - values.
    Used by the webserver for read & stream real-time values.

    :param verbose: :bool: Log to stdout
    :param n_msgs: :int: # of msgs to receive (unlimited by default).
    :return: :dict:
    """
    gen = receiver_msg_generator(verbose=verbose, n_msgs=n_msgs)
    count = 0
    while True:
        try:
            msg, _t1, _t2 = next(gen)
            yield msg_to_dict(msg)
            count += 1
        except StopIteration:
            log('EXIT from enerpi_receiver_generator. StopIteration in msg #{}'.format(count), 'error', verbose)
            break
    return None
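
Consuming the generator from another machine on the same network might look like this (a sketch; it assumes the shared broadcast key is already configured on the receiver):

for sample in enerpi_receiver_generator(verbose=False, n_msgs=5):
    print(sample)  # dict of variable -> value decoded from one broadcast msg
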
Example 13
 def _make_index(self, distribute_existent=True, paths=None):
     df = self._gen_index_entries(paths=paths)
     if distribute_existent and not df.empty:
         if not df[df.is_raw & ~df.is_cat].empty:
             raw_to_distr = df[df.is_raw & ~df.is_cat]
             data = pd.DataFrame(pd.concat([self._load_hdf(p, key=self.key_raw)
                                            for p in raw_to_distr['st']])).sort_index()
             if self.is_raw_data(data):
                 data = self.process_data(data)
             mod_paths = self._distribute_data(data, mode='a')
             for p in raw_to_distr['st']:
                 if not self.test_mode:  # Backup only in normal run
                     p_bkp = os.path.join(self.base_path, DIR_BACKUP, p)
                     os.makedirs(os.path.dirname(p_bkp), exist_ok=True)
                     shutil.copyfile(os.path.join(self.base_path, p), p_bkp)
                 os.remove(os.path.join(self.base_path, p))
             df = df.set_index('st').drop(mod_paths, errors='ignore').reset_index()
             df_2 = self._gen_index_entries(paths=mod_paths)
             df_sts_drop = df[df['st'].apply(lambda x: pd.Series(raw_to_distr['st'] == x).any())]
             df = pd.concat([df.drop(df_sts_drop.index), df_2])
         else:
             log('No stores to distribute', 'debug', self.verbose)
         return df
     elif not df.empty:
         return df
     return None
Example 14
def _web_edit_enerpi_encryption_key(key_file_lines):
    """
    Return dict for user editing of the encryption key
    Make Ordered dict like:
    ==> [(section_name,
            OrderedDict([(VARIABLE, (VALUE, 'text', comment=None))])]

    :param key_file_lines: Text lines of encryption key file (1)
    :return: :tuple of (lines_file_for_webview, dict_file_for_webform):

    """
    try:
        assert len(key_file_lines) == 1
        key = key_file_lines[0]
    except AssertionError:
        msg = 'ERROR Reading CRYPTO Key (incorrect # of lines): {}'.format(key_file_lines)
        log(msg, 'error', False)
        return False, {'error': msg}
    try:
        _ = get_codec(key.encode())
    except AssertionError as e:
        msg = 'ASSERT WITH CRYPTO_KEY: {}, KEY="{}"'.format(e, key)
        log(msg, 'error', False)
        return False, {'error': msg}
    t, sub = TITLE_EDIT_CRYPTOKEY, SUBTITLE_EDIT_CRYPTOKEY
    return [key], OrderedDict([(t, OrderedDict([(sub, (key, 'text', None))]))])
Example 15
def billing_data(start=None, end=None):
    """
    Stream the billing data to make a report.
    Used to load/reload the report from user queries.

    :param start: :str: start datetime of data report
    :param end: :str: end datetime of data report

    :return: SSE stream response

    """
    kwargs = dict(start=start, end=end, cups=BILLING_DATA['cups'])
    if 'potencia' in request.args:
        kwargs.update(potencia_contratada=float(request.args['potencia']))
    if 'bono_social' in request.args:
        kwargs.update(
            con_bono_social=request.args['bono_social'].lower() == 'true')
    if 'impuestos' in request.args:
        zonas = list(DATOS_ZONAS_IMPUESTOS)
        kwargs.update(zona_impuestos=zonas[int(request.args['impuestos'])])
    if 'peaje' in request.args:
        peajes = list(DATOS_TIPO_PEAJE)
        kwargs.update(tipo_peaje=peajes[int(request.args['peaje'])])
    log('BILLING_DATA: {}'.format(kwargs), 'debug', False)
    return Response(_gen_stream_data_factura(**kwargs),
                    mimetype='text/event-stream')
Example 16
 def _classify_data(self, df, func_save_data):
     paths = []
     ahora = dt.datetime.now()
     gb_años = df.groupby(pd.TimeGrouper(freq='A'))
     for ts_year, d_year in gb_años:
         if not d_year.empty:
             gb_meses = d_year.groupby(pd.TimeGrouper(freq='M'))
             for ts_month, d_month in gb_meses:
                 if not d_month.empty:
                     if (ts_year.year == ahora.year) and (ts_month.month == ahora.month):
                         # CURRENT MONTH
                         gb_dias = d_month.groupby(pd.TimeGrouper(freq='D', closed='left', label='left'))
                         for ts_day, d_day in gb_dias:
                             if not d_day.empty:
                                 if ts_day.day == ahora.day:
                                     # TODAY
                                     func_save_data(ST_TODAY, d_day, None, None)
                                     paths.append(ST_TODAY)
                                 else:
                                     # ARCHIVE DAY
                                     p = _make_index_path(ts_day, w_day=True)
                                     log('# ARCHIVE DAY {:%Y-%m-%d} -> {}'.format(ts_day, p), 'debug', self.verbose)
                                     c_day = self.process_data_summary(d_day)
                                     func_save_data(p, d_day, c_day, None)
                                     paths.append(p)
                     else:
                         # ARCHIVE MONTH
                         p = _make_index_path(ts_month, w_day=False)
                         log('# ARCHIVE MONTH --> {}. GOING TO process_data_summary_extra'.format(p),
                             'debug', self.verbose)
                         c_month, c_month_extra = self.process_data_summary_extra(d_month)
                         func_save_data(p, d_month, c_month, c_month_extra)
                         paths.append(p)
     return list(sorted(paths))
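
pd.TimeGrouper was deprecated in pandas 0.21 and later removed; on current pandas the same year/month/day grouping is written with pd.Grouper. A minimal sketch on a toy series:

import pandas as pd

s = pd.Series(range(5), index=pd.date_range('2017-12-30', periods=5, freq='D'))
for ts_month, chunk in s.groupby(pd.Grouper(freq='M')):
    print(ts_month, len(chunk))  # one group per month, labeled with the month end
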
Example 17
def internal_server_error(e):
    """
    Error 500
    :param e: error

    """
    log('INTERNAL_SERVER_ERROR: {}, request={}'.format(e, request), 'error')
    return render_template('error.html', error_e=e, code=500, traceback=None), 500
Example 18
def page_not_found(error):
    """
    Error 404
    :param error: error

    """
    log('page_not_found: {}, url={}'.format(error, request.url), 'error', False)
    return render_template('error.html', error_e=error, code=404, traceback=None), 404
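
These two views are presumably registered with Flask's errorhandler decorator; a minimal self-contained sketch of the pattern (the app object and template name are illustrative):

from flask import Flask, render_template

app = Flask(__name__)

@app.errorhandler(404)
def page_not_found(error):
    # Same shape as above: render the shared error template with the error code
    return render_template('error.html', error_e=error, code=404, traceback=None), 404
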
Example 19
def _loop_rsc_generator(catalog, sleep_time, num_times, verbose=False):
    log('**RESOURCE_GENERATOR LOOP from MULE with PID={}!!!'.format(os.getpid()), 'debug', False)
    counter = 0
    while (counter < num_times) or (num_times == 0):
        gen_ok, took = _rsc_generator(catalog, verbose=verbose)
        counter += 1
        sleep(max(sleep_time - took, 5))
Example 20
def _rsc_generator(catalog, verbose=False):
    tic, ok = time(), False
    log('**RESOURCE_GENERATOR from MULE with PID={}!!!'.format(os.getpid()), 'debug', verbose)
    ok = gen_svg_tiles(IMG_TILES_BASEPATH, catalog, last_hours=(72, 48, 24))
    toc = time()
    log('(MULE) TILES generation ok? {}. TOOK {:.3f} s'.format(ok, toc - tic),
        'info', verbose)
    return ok, toc - tic
Example 21
def remote_data_get(t0,
                    tf=None,
                    enerpi_ip='192.168.1.52',
                    port=80,
                    prefix_remote_enerpi='/enerpi',
                    verbose=True):
    """
    Query an ENERPI catalog on a remote machine running enerpiweb.

    :param t0: start of slice
    :param tf:  end of slice (or None for end = now)
    :param enerpi_ip: IP of the remote machine
    :param port: PORT of the remote enerpiweb server
    :param prefix_remote_enerpi: URL prefix of the remote enerpiweb server
    :param verbose: :bool: verbose mode
    :return: (data_key, pd.DataFrame of sliced values) pairs
    :rtype: dict

    """
    def _request_extract_enerpi_data_store(url_api_download_st):
        dest_path = os.path.join(tempfile.gettempdir(), 'temp_store.h5')
        data = _request_enerpi_remote_file(
            url_api_download_st,
            dest_path,
            file_mimetype='application/octet-stream')
        if data:
            # Return content & remove hdf temporal file store:
            with pd.HDFStore(dest_path, 'r') as st:
                data = {k: st[k] for k in st.keys()}
                log('HDF Store downloaded:\n{}'.format(st), 'ok', verbose,
                    False)
            os.remove(dest_path)
            return data
        return None

    paths = get_catalog_paths(t0, tf)
    url_mask = 'http://{}:{}{}/api/hdfstores/'.format(
        enerpi_ip, port, prefix_remote_enerpi) + '{}'
    data_stores = []
    for p in paths:
        url = url_mask.format(os.path.split(p)[-1])
        log('REQUEST HDF STORE AT: {}'.format(url), 'info', verbose, False)
        data_i = _request_extract_enerpi_data_store(url)
        if data_i is not None:
            data_stores.append(data_i)
    keys = set([k for d in data_stores for k in d.keys()])
    data_out = {
        k: pd.DataFrame(
            pd.concat([
                data[k].loc[t0:tf] for data in data_stores if k in data.keys()
            ])).sort_index()
        for k in keys
    }
    return data_out
Example 22
def _get_png_tile_background(bg_path, size, c_out, c_in):
    if os.path.exists(bg_path):
        png_img = Image.open(bg_path)
        loaded_size = (png_img.width, png_img.height)
        if loaded_size == size:
            return png_img
        log('Different size in PNG TILE: {} == {} -> {}'.format(size, loaded_size, size == loaded_size), 'warn')
    # Generate new tile background
    png_img = _make_radial_gradient_background(out_color=c_out, in_color=c_in, imgsize=size, center=(.3, .3))
    png_img.save(bg_path)
    return png_img
Example 23
def _check_store_relpath(path_st):
    if os.path.pathsep not in path_st:
        path_st = os.path.join(DATA_PATH, path_st)
    else:
        path_st = os.path.abspath(path_st)
    if not os.path.splitext(path_st)[1]:
        path_st += '.h5'
    existe_st = os.path.exists(path_st)
    if not existe_st:
        log('HDF Store not found at "{}"'.format(path_st), 'warn', True)
    return existe_st, path_st
Example 24
 def _save_distributed_data(p, d1, d2, d3):
     f = list(filter(lambda x: x[0] is not None, zip([d1, d2, d3],
                                                     [self.key_raw, self.key_summary, self.key_summary_extra])))
     dfs, keys = list(zip(*f))[0], list(zip(*f))[1]
     p_abs = os.path.join(self.base_path, p)
     if mode == 'a' and os.path.exists(p_abs):  # First read & concatenate, then drop duplicates
         log('** Reading previous data from STORE: {}'.format(p_abs), 'debug', self.verbose)
         old_dfs = self._load_hdf(p, func_store=lambda st: [st[k] for k in keys])
         dfs = [pd.DataFrame(pd.concat([old, df])
                             ).sort_index().reset_index().drop_duplicates(subset='ts').set_index('ts')
                for old, df in zip(old_dfs, dfs)]
     self._save_hdf(dfs, p, keys, mode='w', **KWARGS_SAVE)
Example 25
def delete_log_file(log_file, verbose=True):
    """
    Removes (logging) file from disk.

    :param log_file: :str: logging file path
    :param verbose: :bool: shows logging msgs in stdout.
    """
    if os.path.exists(log_file) and os.path.isfile(log_file):
        log('Deleting LOG FILE in {} ...'.format(log_file), 'warn', verbose, False)
        os.remove(log_file)
        return True
    return False
Example 26
def _concat_loaded_data(dataframes, ini, fin=None, verbose=False):
    try:
        valid_dfs = list(filter(lambda df: df is not None, dataframes))
        if valid_dfs:
            dataframe = pd.DataFrame(pd.concat(valid_dfs)).sort_index()
            if fin is not None:
                return dataframe.loc[ini:fin]
            return dataframe.loc[ini:]
        else:
            log('GET DATA -> No valid dfs ({}): {}'.format(len(dataframes), dataframes), 'warn', verbose)
    except ValueError as e:
        log('GET DATA ERROR: {}'.format(e), 'error', verbose)
    return None
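
The concat / sort / label-based slice pattern in isolation, as a runnable sketch:

import pandas as pd

df1 = pd.DataFrame({'v': [1, 2]}, index=pd.to_datetime(['2017-01-01', '2017-01-02']))
df2 = pd.DataFrame({'v': [3, 4]}, index=pd.to_datetime(['2017-01-03', '2017-01-04']))
merged = pd.concat([df1, df2]).sort_index()
print(merged.loc['2017-01-02':'2017-01-03'])  # .loc slices by timestamp label, both ends inclusive
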
Example 27
 def _load_index(self):
     p = os.path.join(self.base_path, self.catalog_file)
     try:
         index = pd.read_csv(p, index_col=0, parse_dates=['ts_ini', 'ts_fin', 'ts_st'])
         index.cols = index.cols.map(lambda x: json.loads(x.replace("'", '"')))
         return index.drop_duplicates(subset=['st', 'key', 'ts_st', 'ts_ini', 'ts_fin'])
     except (FileNotFoundError, OSError) as e:
         log('FileNotFoundError reading HDFTSCAT INDEX in "{}" [{}]. Rebuilding index...'
             .format(p, e), 'error', self.verbose)
     except AttributeError:
         log('AttributeError reading HDFTSCAT INDEX in "{}". Corrupted?\n{}\n ...Rebuilding index...'
             .format(p, open(p, 'r').readlines()), 'error', self.verbose)
     return None
Example 28
 def update_catalog(self, data=None):
     """
     Execute this function periodically, within the raw data generator process, to keep the catalog up to date.
     It reads the temporary data store, appends it to the catalog, and then resets (recreates) the temporary store.
     """
     temp_data = data if data is not None else self._load_hdf(self.raw_store, self.key_raw)
     new_data = self.archive_periodic(new_data=temp_data, reload_index=True)
     if new_data:
         p = os.path.join(self.base_path, self.raw_store)
         with pd.HDFStore(p, 'w'):
             info = 'Temporal data has been archived. Reset of "{}" is done. Store new size: {:.1f} KB'
             log(info.format(p, os.path.getsize(p) / 1000), 'debug', self.verbose)
         return True
     return False
Example 29
def get_ts_last_save(path_st=HDF_STORE_PATH, get_last_sample=False, verbose=True, n=3):
    """
    Returns last data timestamp in hdf store.

    :param path_st: :str: hdf store file path
    :param get_last_sample: :bool: returns ts or pd.DataFrame
    :param verbose: :bool: shows logging msgs in stdout.
    :param n: :int: # of tail rows
    :return: pd.Timestamp or pd.DataFrame
    """
    tic = time()
    try:
        ts = dt.datetime.fromtimestamp(os.path.getmtime(path_st))
        size_kb = os.path.getsize(path_st) / 1000
        if get_last_sample:
            with pd.HDFStore(path_st, mode='r') as st:
                try:
                    df = st.select(KEY, start=-n)
                    log('Store UPDATE: {:%c} , SIZE = {:.2f} KB. TOOK {:.3f} s'.format(ts, size_kb, time() - tic),
                        'debug', verbose)
                    return df
                except KeyError:
                    log('ERROR: Data "{}" not found in store "{}"'.format(KEY, path_st), 'err', True)
                    return ts
        log('Store UPDATE: {:%c} , SIZE = {:.2f} KB. TOOK {:.3f} s'.format(ts, size_kb, time() - tic), 'debug', verbose)
        return ts
    except FileNotFoundError:
        log('ERROR: Store not found in {}'.format(path_st), 'err', True)
        return None
Example 30
 def run(self):
     """ main control loop """
     log('**INIT_BROADCAST_RECEIVER in PID={}'.format(os.getpid()), 'info', False)
     gen = enerpi_receiver_generator()
     count = 0
     while not self._stopevent.isSet():
         try:
             last = next(gen)
             global last_data
             global buffer_last_data
             last_data = last.copy()
             buffer_last_data.append(last_data)
             # print('DEBUG STREAM: last_data --> ', last_data)
         except StopIteration:
             log('StopIteration on counter={}'.format(count), 'debug',
                 False)
             if count > 0:
                 sleep(2)
                 gen = enerpi_receiver_generator()
             else:
                 log('Not receiving broadcast msgs. StopIteration at init!',
                     'error', False)
                 break
         count += 1
         sleep(.5)
     log('**BROADCAST_RECEIVER in PID={}, thread={}. CLOSED on counter={}'
         .format(os.getpid(), current_thread(), count), 'warn', False)
Example 31
    def export_chunk(self, filename='enerpi_all_data.csv', chunksize=10000):
        """
        Get all samples from ENERPI catalog & export them to CSV
        Perform the task in chunks, in order to avoid too much memory usage (in RPI)

        :param str filename: destination path of CSV file
        :param int chunksize: Chunk size reading pandas HDFStores to export
        :return: ok
        :rtype: bool

        """
        path_export = os.path.join(self.base_path, filename)
        if self.tree is not None:
            stores_export = self.tree[self.tree.is_raw][['st', 'n_rows']]
            log('EXPORT DATA FROM:\n{}'.format(stores_export), 'magenta', self.verbose, False)
            init = False
            if stores_export.n_rows.sum() > 0:
                for _, (p, n_rows) in stores_export.iterrows():
                    p_st = os.path.join(self.base_path, p)
                    log('EXPORT {} ROWS FROM STORE: {}'.format(n_rows, p_st), 'info', self.verbose, False)
                    start = 0
                    with pd.HDFStore(p_st, 'r') as st:
                        while start < n_rows:
                            log('READING & EXPORTING ROWS FROM {} TO {} IN {}'.format(start, start + chunksize, p),
                                'info', self.verbose, False)
                            chunk_data = st.select(self.key_raw, start=start, stop=start + chunksize)
                            if not init:
                                chunk_data.to_csv(path_export, header=True, mode='w')
                                init = True
                            else:
                                chunk_data.to_csv(path_export, header=False, mode='a')
                            start += chunksize
                return True
        log('NO DATA TO EXPORT! CATALOG:\n{}'.format(self.tree), 'error', self.verbose)
        return False
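
The header-once / append-afterwards CSV idiom on its own, as a runnable sketch:

import pandas as pd

df = pd.DataFrame({'v': range(25)})
chunksize, init = 10, False
for start in range(0, len(df), chunksize):
    chunk = df.iloc[start:start + chunksize]
    if not init:
        chunk.to_csv('/tmp/export.csv', header=True, mode='w')   # first chunk: new file + header
        init = True
    else:
        chunk.to_csv('/tmp/export.csv', header=False, mode='a')  # later chunks: append, no header
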
Example 32
def get_rgbled(verbose=True):
    """Tries to get gpiozero RGBLED object at pins PIN_R|G|B if WITH_RGBLED in config file"""
    led = None
    if WITH_RGBLED:
        try:
            from gpiozero import RGBLED
            from gpiozero.pins.rpigpio import RPiGPIOPin

            led = RGBLED(RPiGPIOPin(PIN_R), RPiGPIOPin(PIN_G), RPiGPIOPin(PIN_B), active_high=True)
            led.blink(.25, .25, on_color=(1, 1, 1), n=5)
        except (OSError, RuntimeError, ImportError) as e:
            log('** Not using RGBLED with GPIOZERO ({} [{}]). Check your "{}" file.'
                .format(e, e.__class__, CONFIG_FILENAME), 'warn', verbose)
    return led
Example 33
def _gen_stream_data_bokeh(start=None,
                           end=None,
                           last_hours=None,
                           rs_data=None,
                           use_median=False,
                           kwh=False,
                           columns=None):
    tic = time()
    if start or end or last_hours:
        cat = enerpi_data_catalog(check_integrity=False)
        if kwh:
            df = cat.get_summary(start=start, end=end, last_hours=last_hours)
        else:
            df = cat.get(start=start, end=end, last_hours=last_hours)
            if (df is not None) and not df.empty:
                if last_hours is not None:
                    df_last_data = _get_dataframe_buffer_data()
                    if df_last_data is not None:
                        df_last_data = df_last_data.tz_localize(None)
                        df = pd.DataFrame(pd.concat([
                            df, df_last_data
                        ])).sort_index().drop_duplicates(keep='last')
                df = cat.resample_data(df,
                                       rs_data=rs_data,
                                       use_median=use_median)
    else:
        df = _get_dataframe_buffer_data()
    toc_df = time()
    if df is not None and not df.empty:
        script, divs, version = html_plot_buffer_bokeh(df,
                                                       is_kwh_plot=kwh,
                                                       columns=columns)
        toc_p = time()
        log('Bokeh plot gen in {:.3f} s; pd.df in {:.3f} s.'.format(toc_p - toc_df, toc_df - tic), 'debug', False)
        yield _format_event_stream(
            dict(success=True,
                 b_version=version,
                 script_bokeh=script,
                 bokeh_div=divs[0],
                 took=round(toc_p - tic, 3),
                 took_df=round(toc_df - tic, 3)))
    else:
        msg = ('No data for BOKEH PLOT: start={}, end={}, last_hours={}, '
               'rs_data={}, use_median={}, kwh={}<br>--> DATA: {}'.format(
                   start, end, last_hours, rs_data, use_median, kwh, df))
        yield _format_event_stream(dict(success=False, error=msg))
    yield _format_event_stream('CLOSE')
Example 34
def download_hdfstore_file(relpath_store=None):
    """
    Download HDFStore file from ENERPI (*.h5 file)

    :param str relpath_store: HDF store filename

    """
    cat = enerpi_data_catalog(check_integrity=False)
    path_file = cat.get_path_hdf_store_binaries(relpath_store)
    log('download_hdfstore_file with path_file: "{}", relpath: "{}"'.format(path_file, relpath_store), 'debug', False)
    if (path_file is not None) and os.path.exists(path_file):
        if 'as_attachment' in request.args:
            return send_file(path_file, as_attachment=True, attachment_filename=os.path.basename(path_file))
        return send_file(path_file, as_attachment=False)
    return abort(404)
Example 35
def _get_pb_obj():
    # Lazy load
    global PBOBJ

    if PBOBJ is None:
        try:
            pushbullet_token = CONFIG.get('NOTIFY', 'PUSHBULLET_TOKEN')
            if not pushbullet_token:
                raise InvalidKeyError
            PBOBJ = Pushbullet(pushbullet_token)
        except (NoOptionError, NoSectionError, InvalidKeyError) as e:
            log('NO Pushbullet config ({} [{}])'
                .format(e, e.__class__), 'error', False)
            PBOBJ = None
    return PBOBJ
Example 36
def ts_strftime(ts):
    """
    strftime jinja2 template filter

    :param ts: :datetime: datetime object
    :return: :str: datestr

    """
    try:
        if (ts.hour == 0) and (ts.minute == 0):
            return ts.strftime('%d/%m/%y')
        return ts.strftime('%d/%m/%y %H:%M')
    except AttributeError as e:
        log('AttributeError in template_filter:ts_strftime -> {}'.format(e), 'error', False)
        return str(ts)
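
Presumably registered as a Jinja2 filter on the Flask app, so templates can write {{ ts|ts_strftime }}; a sketch of the registration pattern (the app object is illustrative):

from flask import Flask

app = Flask(__name__)

@app.template_filter('ts_strftime')
def ts_strftime(ts):
    try:
        fmt = '%d/%m/%y' if (ts.hour == 0) and (ts.minute == 0) else '%d/%m/%y %H:%M'
        return ts.strftime(fmt)
    except AttributeError:
        return str(ts)
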
Example 37
def startstop(service='enerpi_start'):
    """Endpoint for control ENERPI in RPI. Only for dev mode.

    It can restart the ENERPI daemon logger or even reboot the machine
    for a fresh start after a config change.

    :param service: service id ('enerpi_start/stop' to operate
                    the logger, or 'machine' for a reboot)

    """
    def _system_operation(command):
        log('SYSTEM_OPERATION CMD: "{}"'.format(command), 'debug', False)
        os.system(command)

    form = DummyForm()
    cmd = msg = alert = None
    if form.validate_on_submit():
        if service == 'enerpi_start':
            python_pathbin = os.path.dirname(sys.executable)
            cmd = '{}/enerpi-daemon start'.format(python_pathbin)
            msg = 'Starting ENERPI logger from webserver... ({})'.format(cmd)
            alert = 'warning'
        elif service == 'enerpi_stop':
            python_pathbin = os.path.dirname(sys.executable)
            cmd = '{}/enerpi-daemon stop'.format(python_pathbin)
            msg = 'Stopping ENERPI logger from webserver... ({})'.format(cmd)
            alert = 'danger'
        elif service == 'machine':
            cmd = 'sudo /sbin/reboot now'
            msg = 'Rebooting! MACHINE... see you soon... ({})'.format(cmd)
            alert = 'danger'
        elif service == 'machine_off':
            cmd = 'sudo /sbin/shutdown now'
            msg = 'Shutdown NOW!!!... Wait some time ' \
                  'to turn power off... ({})'.format(cmd)
            alert = 'danger'
        if cmd is not None:
            log(msg, 'debug', False)
            t = Timer(.5, _system_operation, args=(cmd,))
            t.start()
            return redirect(
                url_for('control', after_sysop=True,
                        alerta=json.dumps({'alert_type': alert, 'texto_alerta': msg})))
    return abort(500)
Example 38
def broadcast_msg(msg,
                  counter_unreachable,
                  sock_send=None,
                  codec=None,
                  port=None,
                  verbose=True):
    """
    Broadcast data over UDP (to multiple receivers) as an encrypted text message.
    :param msg: Text string to send.
    :param counter_unreachable: np.array([0, 0]) tracking failed sends (consecutive & total)
    :param sock_send: Broadcast send socket. Returned so it can be reused.
    :param codec: :Fernet obj: Fernet object for encrypting msgs.
    :param port: Broadcast port number.
    :param verbose: Print send & error messages to stdout
    :return: sock_send
    """
    def _get_broadcast_socket(verb=False):
        sock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM,
                             socket.IPPROTO_UDP)
        # sock.setsockopt(socket.IPPROTO_IP, socket.IP_MULTICAST_TTL, 32)
        sock.setsockopt(socket.SOL_SOCKET, socket.SO_BROADCAST, 1)
        sock.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
        log(_DESCRIPTION_IO.format(_UDP_IP, port), 'ok', verb, verb)
        return sock

    if codec is None:
        codec = get_codec()
    if port is None:
        port = CONFIG.getint('BROADCAST', 'UDP_PORT', fallback=57775)
    if sock_send is None:
        sock_send = _get_broadcast_socket()
    encrypted_msg_b = codec.encrypt(msg.encode())
    try:
        sock_send.sendto(encrypted_msg_b, (_UDP_IP, port))
        counter_unreachable[0] = 0
    except OSError as e:  # [Errno 101] Network is unreachable
        if counter_unreachable[0] % 3 == 0:
            log('OSError: {}; C_UNREACHABLE: {}'.format(e, counter_unreachable), 'warn', verbose)
        counter_unreachable += 1
        sock_send = None
    # except Exception as e:
    #     log('ERROR in sendto: {} [{}]'.format(e, e.__class__), 'err', verbose)
    #     sock_send = _get_broadcast_socket()
    #     sock_send.sendto(encrypted_msg_b, (_UDP_IP, UDP_PORT))
    log('SENT: {}'.format(msg), 'debug', verbose, False)
    return sock_send
Example 39
def get_codec(secret_key=None):
    """
    Get a Fernet codec from secret_key, raising an AssertionError if the key is invalid.

    :param secret_key: :bytes: encryption_key
    :return: Fernet()

    """
    if secret_key is None:
        secret_key = get_encryption_key()
    try:
        codec = Fernet(secret_key)
        return codec
    except Error as error_fernet:
        log('Crypto KEY is not a valid KEY! -> {}.\nKEY="{}". Try again... BYE!'
            .format(error_fernet, secret_key), 'error', True)
        assert 0, 'Crypto KEY is not a valid KEY: {}'.format(error_fernet)
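
A self-contained Fernet round trip with the cryptography package, mirroring how broadcast_msg encrypts and the receiver decrypts:

from cryptography.fernet import Fernet

key = Fernet.generate_key()            # a valid 32-byte url-safe base64 key
codec = Fernet(key)
token = codec.encrypt(b'{"power": 235.4}')
assert codec.decrypt(token) == b'{"power": 235.4}'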