Example #1
    def test_9_catalog_index_update(self):
        from enerpi.base import BASE_PATH
        from enerpi.api import enerpi_data_catalog
        # Regen cat_file
        cat_file = os.path.join(self.DATA_PATH, self.cat.catalog_file)
        pp.print_secc('Regenerating catalog_file: ("{}" is removed and recreated with check_integrity=True)'
                      .format(cat_file))
        pp.print_cyan(open(cat_file).read())
        os.remove(cat_file)
        new_cat = enerpi_data_catalog(base_path=self.cat.base_path,
                                      raw_file=self.cat.raw_store,
                                      check_integrity=True, verbose=True, test_mode=True)
        pp.print_ok(new_cat)

        # Now with corrupted cat_file & non-existent raw_file
        with open(cat_file, 'a') as f:
            f.write('corrupting data catalog; -1\n')
        new_cat_2 = enerpi_data_catalog(base_path=self.cat.base_path,
                                        raw_file=os.path.join(self.DATA_PATH, 'enerpi_data_non_existent.h5'),
                                        check_integrity=True, verbose=True, test_mode=True)
        pp.print_ok(new_cat_2)

        # Now with distributing data:
        raw_data = self.cat._load_store(os.path.join(BASE_PATH, '..', 'tests', 'rsc',
                                                     'test_update_month', 'enerpi_data_test.h5'))
        archived_data = self.cat._load_store('DATA_YEAR_2016/DATA_2016_MONTH_10.h5')
        assert self.cat.is_raw_data(raw_data)
        assert not self.cat.is_raw_data(archived_data)
        archived_data.index += pd.Timestamp.now() - raw_data.index[-1]
        raw_data.index += pd.Timestamp.now() - raw_data.index[-1]
        print(archived_data.index)
        print(raw_data.index)

        # Delete all hdf stores:
        pp.print_cyan(os.listdir(self.DATA_PATH))
        pp.print_cyan(os.listdir(os.path.join(self.DATA_PATH, 'DATA_YEAR_2016')))
        pp.print_cyan(os.listdir(os.path.join(self.DATA_PATH, 'CURRENT_MONTH')))
        shutil.rmtree(os.path.join(self.cat.base_path, 'DATA_YEAR_2016'))
        shutil.rmtree(os.path.join(self.cat.base_path, 'CURRENT_MONTH'))
        # shutil.rmtree(os.path.join(self.cat.base_path, 'OLD_STORES'))
        # os.remove(os.path.join(self.cat.base_path, self.cat.raw_store))

        # Populate new hdf stores:
        archived_data.to_hdf(os.path.join(self.cat.base_path, 'PROCESSED_DATA_TO_BE_ARCHIVED.h5'), self.cat.key_raw)
        archived_data.to_hdf(os.path.join(self.cat.base_path, 'RAW_DATA_TO_BE_ARCHIVED.h5'), self.cat.key_raw)
        print(os.listdir(self.cat.base_path))

        # New catalog:
        new_cat_3 = enerpi_data_catalog(base_path=self.cat.base_path, check_integrity=True,
                                        verbose=True, test_mode=True)
        pp.print_ok(new_cat_3)
        print(os.listdir(self.cat.base_path))
        print(os.listdir(os.path.join(self.cat.base_path, 'CURRENT_MONTH')))
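
The regeneration pattern exercised above, reduced to its core, might look like the sketch below (the base path is hypothetical; per the test, deleting the catalog index and re-instantiating with check_integrity=True rebuilds it from the HDF stores found under base_path):

import os
from enerpi.api import enerpi_data_catalog

base_path = '/home/pi/ENERPIDATA'  # hypothetical data directory
cat = enerpi_data_catalog(base_path=base_path, check_integrity=False)
cat_file = os.path.join(base_path, cat.catalog_file)  # catalog index file
if os.path.exists(cat_file):
    os.remove(cat_file)  # drop the index on purpose
# Re-instantiating with check_integrity=True regenerates the catalog index
cat = enerpi_data_catalog(base_path=base_path, check_integrity=True, verbose=True)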
Example #2
def _get_temp_catalog_for_testing(subpath_test_files='test_context_enerpi',
                                  raw_file='enerpi_data_test.h5', check_integrity=True):
    """
    Copy example ENERPI files & set up a common data catalog for testing.

    """
    print('TEST DEBUGGING: in get_temp_catalog_for_testing')
    dir_config = os.path.join(BASE_PATH, 'config')
    path_default_datapath = os.path.join(dir_config, '.enerpi_data_path')
    before_tests = open(path_default_datapath).read()

    # Prepare files:
    path_files_test = os.path.abspath(os.path.join(BASE_PATH, '..', 'tests', 'rsc', subpath_test_files))
    tmp_dir = tempfile.TemporaryDirectory(prefix='ENERPIDATA_test')
    data_path = tmp_dir.name
    open(path_default_datapath, 'w').write(data_path)
    try:
        shutil.copytree(path_files_test, data_path)
    except FileExistsError:
        tmp_dir.cleanup()  # shutil.rmtree(data_path)
        shutil.copytree(path_files_test, data_path)
    # with patch('builtins.input', return_value='1'):
    #     from enerpi.base import reload_config
    #     from enerpi.api import enerpi_data_catalog
    #     cat = enerpi_data_catalog(base_path=data_path, raw_file=raw_file,
    #                               check_integrity=check_integrity, verbose=True)

    from enerpi.base import reload_config
    reload_config()
    from enerpi.api import enerpi_data_catalog
    cat = enerpi_data_catalog(base_path=data_path, raw_file=raw_file, check_integrity=check_integrity,
                              verbose=True, test_mode=True)

    return tmp_dir, data_path, cat, path_default_datapath, before_tests
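
A minimal sketch of consuming this fixture from a unittest class, assuming only the tuple returned above; the class and method names are illustrative, and tearDownClass restores the original .enerpi_data_path contents before dropping the temporary directory:

import unittest


class TestWithTempCatalog(unittest.TestCase):

    @classmethod
    def setUpClass(cls):
        # Build an isolated ENERPI data catalog from the bundled test resources
        (cls.tmp_dir, cls.DATA_PATH, cls.cat,
         cls.path_default_datapath, cls.before_tests) = _get_temp_catalog_for_testing()

    @classmethod
    def tearDownClass(cls):
        # Restore the original data-path pointer and remove the temporary directory
        with open(cls.path_default_datapath, 'w') as f:
            f.write(cls.before_tests)
        cls.tmp_dir.cleanup()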
Example #3
def _get_filepath_from_file_id(file_id):
    # Interesting files / logs to show/edit/download/upload:
    if file_id in ENERPI_CONFIG_FILES:
        return True, False, os.path.join(DATA_PATH, ENERPI_CONFIG_FILES[file_id]['filename'])
    elif 'flask' == file_id:
        is_logfile, filename = True, SERVER_FILE_LOGGING
    elif 'rsc' == file_id:
        is_logfile, filename = True, SERVER_FILE_LOGGING_RSCGEN
    elif 'nginx_err' == file_id:
        is_logfile, filename = True, '/var/log/nginx/error.log'
    elif 'nginx' == file_id:
        is_logfile, filename = True, '/var/log/nginx/access.log'
    elif 'enerpi' == file_id:
        is_logfile, filename = True, FILE_LOGGING
    elif 'uwsgi' == file_id:
        is_logfile, filename = True, '/var/log/uwsgi/{}.log'.format(os.path.splitext(UWSGI_CONFIG_FILE)[0])
    elif 'daemon_out' == file_id:
        is_logfile, filename = True, DAEMON_STDOUT
    elif 'daemon_err' == file_id:
        is_logfile, filename = True, DAEMON_STDERR
    else:  # File derived from the data catalog
        cat = enerpi_data_catalog(check_integrity=False)
        if 'raw_store' == file_id:
            is_logfile, filename = False, os.path.join(cat.base_path, cat.raw_store)
        elif 'catalog' == file_id:
            is_logfile, filename = False, os.path.join(cat.base_path, cat.catalog_file)
        else:
            log('Unrecognized FILE_ID: {}'.format(file_id), 'error', False)
            is_logfile, filename = False, SERVER_FILE_LOGGING
            return False, is_logfile, filename
    return True, is_logfile, filename
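
A minimal sketch of how the returned (found, is_logfile, filename) tuple could be consumed from a Flask view; the app object, route name, and the choice to serve log files inline are illustrative assumptions, not the project's actual endpoint:

from flask import abort, send_file


@app.route('/files/<file_id>')
def serve_file(file_id):
    found, is_logfile, filename = _get_filepath_from_file_id(file_id)
    if not found or not os.path.exists(filename):
        return abort(404)
    # Serve log files inline; offer any other file as a download
    return send_file(filename, as_attachment=not is_logfile)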
Example #4
def control():
    """
    Admin Control Panel with links to LOG viewing/downloading, hdf_stores download, ENERPI config editor, etc.

    """
    def _text_button_hdfstore(rel_path, ini, fin, nrows):
        name = os.path.basename(os.path.splitext(rel_path)[0])
        ini, fin = ini.strftime('%d/%m/%y'), fin.strftime('%d/%m/%y')
        if ini == fin:
            t_date = ini
        else:
            t_date = '{}->{}'.format(ini, fin)
        text = '''<i class="fa fa-file-archive-o" aria-hidden="true"></i><strong> {}</strong><br>   '''.format(
            name)
        text += '''<small>({}, N={})</small>'''.format(t_date, nrows)
        return text

    is_sender_active, last = stream_is_alive()
    after_sysop = request.args.get('after_sysop', '')
    alerta = request.args.get('alerta', '')
    if alerta:
        alerta = json.loads(alerta)
    cat = enerpi_data_catalog(check_integrity=False)
    df = cat.tree
    if df is not None:
        df = df[df.is_cat & df.is_raw].sort_values(by='ts_ini',
                                                   ascending=False)
        paths_rel = [(os.path.basename(p), _text_button_hdfstore(p, t0, tf, n))
                     for p, t0, tf, n in zip(df['st'], df['ts_ini'],
                                             df['ts_fin'], df['n_rows'])]
    else:
        paths_rel = []
    form_operate = DummyForm()
    return render_template('control_panel.html',
                           d_catalog={
                               'path_raw_store':
                               os.path.join(cat.base_path, cat.raw_store),
                               'path_catalog':
                               os.path.join(cat.base_path, cat.catalog_file),
                               'ts_init':
                               cat.min_ts,
                               'ts_catalog':
                               cat.index_ts
                           },
                           d_last_msg=last,
                           is_sender_active=is_sender_active,
                           list_stores=paths_rel,
                           form_operate=form_operate,
                           after_sysop=after_sysop,
                           alerta=alerta,
                           version=VERSION)
Example #5
def _gen_stream_data_bokeh(start=None,
                           end=None,
                           last_hours=None,
                           rs_data=None,
                           use_median=False,
                           kwh=False,
                           columns=None):
    tic = time()
    if start or end or last_hours:
        cat = enerpi_data_catalog(check_integrity=False)
        if kwh:
            df = cat.get_summary(start=start, end=end, last_hours=last_hours)
        else:
            df = cat.get(start=start, end=end, last_hours=last_hours)
            if (df is not None) and not df.empty:
                if last_hours is not None:
                    df_last_data = _get_dataframe_buffer_data()
                    if df_last_data is not None:
                        df_last_data = df_last_data.tz_localize(None)
                        df = pd.DataFrame(pd.concat([
                            df, df_last_data
                        ])).sort_index().drop_duplicates(keep='last')
                df = cat.resample_data(df,
                                       rs_data=rs_data,
                                       use_median=use_median)
    else:
        df = _get_dataframe_buffer_data()
    toc_df = time()
    if df is not None and not df.empty:
        script, divs, version = html_plot_buffer_bokeh(df,
                                                       is_kwh_plot=kwh,
                                                       columns=columns)
        toc_p = time()
        log(
            'Bokeh plot gen in {:.3f} s; pd.df in {:.3f} s.'.format(
                toc_p - toc_df, toc_df - tic), 'debug', False)
        yield _format_event_stream(
            dict(success=True,
                 b_version=version,
                 script_bokeh=script,
                 bokeh_div=divs[0],
                 took=round(toc_p - tic, 3),
                 took_df=round(toc_df - tic, 3)))
    else:
        msg = ('No data for BOKEH PLOT: start={}, end={}, last_hours={}, '
               'rs_data={}, use_median={}, kwh={}<br>--> DATA: {}'.format(
                   start, end, last_hours, rs_data, use_median, kwh, df))
        yield _format_event_stream(dict(success=False, error=msg))
    yield _format_event_stream('CLOSE')
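
A minimal sketch of wiring this generator to a server-sent-events endpoint with Flask's streaming Response; the app object, route, and query parameters are illustrative assumptions:

from flask import Response, request


@app.route('/api/stream/bokeh')
def stream_bokeh():
    last_hours = request.args.get('last_hours', default=None, type=int)
    kwh = 'kwh' in request.args
    # The generator yields pre-formatted event-stream messages and ends with 'CLOSE'
    return Response(_gen_stream_data_bokeh(last_hours=last_hours, kwh=kwh),
                    mimetype='text/event-stream')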
Example #6
    def test__10_export_data(self):
        from enerpi.api import enerpi_data_catalog

        print(os.listdir(self.cat.base_path))
        print(os.listdir(os.path.join(self.cat.base_path, 'CURRENT_MONTH')))
        pp.print_red(self.cat.tree)
        self.cat = enerpi_data_catalog(base_path=self.cat.base_path, check_integrity=True,
                                       verbose=True, test_mode=True)
        pp.print_ok(self.cat.tree)

        pp.print_magenta(self.cat.tree)
        export_file = 'enerpi_all_data_test_1.csv'
        exported_ok = self.cat.export_chunk(filename=export_file)
        pp.print_cyan(exported_ok)
        self.assertIs(exported_ok, True)
        os.remove(os.path.join(self.DATA_PATH, export_file))
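
Outside the test suite, the same export could be invoked roughly as below, assuming (as the test suggests) that export_chunk takes a filename and returns True on success, with the CSV written inside the catalog's data directory:

from enerpi.api import enerpi_data_catalog

cat = enerpi_data_catalog(check_integrity=False)
if cat.export_chunk(filename='enerpi_all_data.csv'):
    print('Catalog data exported to enerpi_all_data.csv')
else:
    print('Export failed or there was no data to export')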
Example #7
def download_hdfstore_file(relpath_store=None):
    """
    Download HDFStore file from ENERPI (*.h5 file)

    :param str relpath_store: HDF store filename

    """
    cat = enerpi_data_catalog(check_integrity=False)
    path_file = cat.get_path_hdf_store_binaries(relpath_store)
    log('download_hdfstore_file with path_file: "{}", relpath: "{}"'.format(path_file, relpath_store), 'debug', False)
    if (path_file is not None) and os.path.exists(path_file):
        if 'as_attachment' in request.args:
            return send_file(path_file, as_attachment=True, attachment_filename=os.path.basename(path_file))
        return send_file(path_file, as_attachment=False)
    return abort(404)
Example #8
def _get_enerpi_data(start=None, end=None, is_consumption=True):
    if not (start or end):
        start = (dt.datetime.now(tz=SENSORS.TZ) - dt.timedelta(days=7)
                 ).replace(hour=0, minute=0, second=0, microsecond=0)  # 1 week
    else:
        if start and (type(start) is str):
            start = start.replace('_', ' ')
        if end and (type(end) is str):
            end = end.replace('_', ' ')
    cat = enerpi_data_catalog(check_integrity=False)
    if is_consumption:
        df = cat.get_summary(start=start, end=end)
    else:
        df = cat.get(start=start, end=end, column=SENSORS.main_column)
    return df
Example #9
def _get_enerpi_data(start=None, end=None, is_consumption=True):
    if not (start or end):
        start = (dt.datetime.now(tz=SENSORS.TZ) -
                 dt.timedelta(days=7)).replace(hour=0,
                                               minute=0,
                                               second=0,
                                               microsecond=0)  # 1 week
    else:
        if start and (type(start) is str):
            start = start.replace('_', ' ')
        if end and (type(end) is str):
            end = end.replace('_', ' ')
    cat = enerpi_data_catalog(check_integrity=False)
    if is_consumption:
        df = cat.get_summary(start=start, end=end)
    else:
        df = cat.get(start=start, end=end, column=SENSORS.main_column)
    return df
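
A minimal sketch of consuming this helper; the 'kWh' column name follows Example #11, and the date strings are illustrative values assumed to be parseable by the catalog:

df = _get_enerpi_data(start='2016-10-01', end='2016-10-31', is_consumption=True)
if (df is not None) and not df.empty and ('kWh' in df):
    # Aggregate the hourly summary to daily consumption totals
    daily_kwh = df['kWh'].resample('D').sum()
    print(daily_kwh.round(2))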
Example #10
def control():
    """
    Admin Control Panel with links to LOG viewing/downloading, hdf_stores download, ENERPI config editor, etc.

    """
    def _text_button_hdfstore(rel_path, ini, fin, nrows):
        name = os.path.basename(os.path.splitext(rel_path)[0])
        ini, fin = ini.strftime('%d/%m/%y'), fin.strftime('%d/%m/%y')
        if ini == fin:
            t_date = ini
        else:
            t_date = '{}->{}'.format(ini, fin)
        text = '''<i class="fa fa-file-archive-o" aria-hidden="true"></i><strong> {}</strong><br>   '''.format(name)
        text += '''<small>({}, N={})</small>'''.format(t_date, nrows)
        return text

    is_sender_active, last = stream_is_alive()
    after_sysop = request.args.get('after_sysop', '')
    alerta = request.args.get('alerta', '')
    if alerta:
        alerta = json.loads(alerta)
    cat = enerpi_data_catalog(check_integrity=False)
    df = cat.tree
    if df is not None:
        df = df[df.is_cat & df.is_raw].sort_values(
            by='ts_ini', ascending=False)
        paths_rel = [(os.path.basename(p), _text_button_hdfstore(p, t0, tf, n))
                     for p, t0, tf, n in zip(df['st'], df['ts_ini'],
                                             df['ts_fin'], df['n_rows'])]
    else:
        paths_rel = []
    form_operate = DummyForm()
    return render_template(
        'control_panel.html',
        d_catalog={
            'path_raw_store': os.path.join(cat.base_path, cat.raw_store),
            'path_catalog': os.path.join(cat.base_path, cat.catalog_file),
            'ts_init': cat.min_ts, 'ts_catalog': cat.index_ts},
        d_last_msg=last, is_sender_active=is_sender_active,
        list_stores=paths_rel, form_operate=form_operate,
        after_sysop=after_sysop, alerta=alerta, version=VERSION)
Example #11
def _gen_stream_data_factura(start=None, end=None, **kwargs_factura):
    tic = time()
    if start is None:
        # From the start of the current month up to now
        start = dt.datetime.now(tz=SENSORS.TZ).replace(day=1,
                                                       hour=0,
                                                       minute=0,
                                                       second=0,
                                                       microsecond=0)
        end = dt.datetime.now(tz=SENSORS.TZ)
    cat = enerpi_data_catalog(check_integrity=False)
    df = cat.get_summary(start=start, end=end)
    toc_df = time()
    if (df is not None) and not df.empty and ('kWh' in df):
        consumption = df['kWh']
        # Fix timezone (already handled in esiosdata)
        # try:
        #     consumption.index = consumption.index.tz_localize(SENSORS.TZ, ambiguous='infer')
        # except AmbiguousTimeError as e:
        #     consumption.index = consumption.index.tz_localize(SENSORS.TZ, ambiguous='NaT')
        #     consumption = consumption.reindex(DatetimeIndex(start=consumption.index[0], end=consumption.index[-1],
        #                                                     freq='1h', tz=SENSORS.TZ)).interpolate()
        #     log('AmbiguousTimeError ({}) in elec_bill. Reindexing and interpolating the index.'.format(e), 'error')
        factura = FacturaElec(consumo=consumption, **kwargs_factura)
        data_factura = factura.to_dict(include_text_repr=True,
                                       include_html_repr=True)
        toc_p = time()
        msg = 'Bill generated in {:.3f} s; data in {:.3f} s.'.format(
            toc_p - toc_df, toc_df - tic)
        log(msg, 'debug', False)
        log(
            'stream_data_factura: STREAM BILL from "{}" to "{}"'.format(
                start, end), 'debug', False)
        yield _format_event_stream(
            dict(success=True,
                 factura=data_factura,
                 took=round(toc_p - tic, 3),
                 took_df=round(toc_df - tic, 3)))
    elif df is not None:
        factura = FacturaElec(start, end, **kwargs_factura)
        data_factura = factura.to_dict(include_text_repr=True,
                                       include_html_repr=True)
        toc_p = time()
        msg = 'Empty bill generated in {:.3f} s; data in {:.3f} s.'.format(
            toc_p - toc_df, toc_df - tic)
        log(msg, 'debug', False)
        log(
            'stream_data_factura: STREAM EMPTY BILL from "{}" to "{}"'.format(
                start, end), 'debug', False)
        yield _format_event_stream(
            dict(success=True,
                 factura=data_factura,
                 error=msg,
                 took=round(toc_p - tic, 3),
                 took_df=round(toc_df - tic, 3)))
    else:
        msg = 'No data from {} to {}. CATALOG:\n{}'.format(start, end, cat)
        log(msg, 'debug', False)
        log(
            'stream_data_factura: STREAM ERR NO DATA from "{}" to "{}"'.format(
                start, end), 'debug', False)
        yield _format_event_stream(
            dict(success=False,
                 error=msg,
                 took=round(time() - tic, 3),
                 took_df=round(toc_df - tic, 3)))
    log(
        'CLOSING stream_data_factura from "{}" to "{}" with args={}'.format(
            start, end, kwargs_factura), 'debug', False)
    yield _format_event_stream('CLOSE')