Example #1
def get_error(msg: str) -> str:
    error_type = str(sys_exc_info()[0]).split("'", 3)[1]  # pull the class name out of "<class 'SomeError'>"

    if msg not in config.NONE_RESULTS:
        error_msg = msg

    else:
        error_msg = sys_exc_info()[1]

    if error_msg in config.NONE_RESULTS:
        error_msg = 'Got no error message.'

    return f"{error_type} => {error_msg}"
Example #2
def _killall(greenlets, exception):
    for g in greenlets:
        if not g.dead:
            try:
                g.throw(exception)
            except:  # pylint:disable=bare-except, undefined-variable
                get_my_hub(g).handle_error(g, *sys_exc_info())
Example #3
def run_command(commands, args, cwd=None, verbose=False, hide_stderr=False):
   assert isinstance(commands, list)
   # noinspection PyUnusedLocal
   p = None
   for c in commands:
      try:
         # remember shell=False, so use git.cmd on windows, not just git
         p = subprocess_Popen([c] + args, cwd=cwd, stdout=subprocess_PIPE, stderr=(subprocess_PIPE if hide_stderr else None))
         break
      except EnvironmentError:
         e = sys_exc_info()[1]
         if e.errno == ERRNO_ENOENT:
            continue
         if verbose:
            print('unable to run {}'.format(args[0]))
            print(e)
         return None
   else:
      if verbose:
         print('unable to find command, tried {}'.format(commands, ))
      return None
   stdout = p.communicate()[0].strip()
   if sys_version >= '3':
      stdout = stdout.decode()
   if p.returncode != 0:
      if verbose:
         print('unable to run {} (error)'.format(args[0]))
      return None
   return stdout
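Here commands is a list of candidate executables tried in order until one spawns (e.g. git vs. git.cmd on Windows, per the comment above). A hedged usage sketch; the executable names and argument list below are hypothetical, not from the source:

out = run_command(["git", "git.cmd"], ["rev-parse", "--short", "HEAD"], verbose=True, hide_stderr=True)
if out is None:
   print("git not available or command failed")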
Example #4
def _killall(greenlets, exception):
    for g in greenlets:
        if not g.dead:
            try:
                g.throw(exception)
            except: # pylint:disable=bare-except, undefined-variable
                get_my_hub(g).handle_error(g, *sys_exc_info())
Example #5
def _killall(greenlets, exception):
    for g in greenlets:
        if not g.dead:
            try:
                g.throw(exception)
            except:  # pylint:disable=bare-except
                g.parent.handle_error(g, *sys_exc_info())
Example #6
def run_command(commands, args, cwd=None, verbose=False, hide_stderr=False):
    assert isinstance(commands, list)
    # noinspection PyUnusedLocal
    p = None
    for c in commands:
        try:
            p = subprocess_Popen([c] + args,
                                 cwd=cwd,
                                 stdout=PIPE,
                                 stderr=(PIPE if hide_stderr else None))
            break
        except EnvironmentError:
            e = sys_exc_info()[1]
            if e.errno == ENOENT:
                continue
            if verbose:
                print('unable to run {}'.format(args[0]))
                print(e)
            return None
    else:
        if verbose:
            print('unable to find command, tried {}'.format(commands))
        return None
    stdout = p.communicate()[0].strip()
    if sys_version >= '3':
        stdout = stdout.decode()
    if p.returncode != 0:
        if verbose:
            print('unable to run {} (error)'.format(args[0]))
        return None
    return stdout
Example #7
def _killall(greenlets, exception):
    for g in greenlets:
        if not g.dead:
            try:
                g.throw(exception)
            except: # pylint:disable=bare-except
                g.parent.handle_error(g, *sys_exc_info())
Example #8
def _kill(glet, exception, waiter):
    try:
        glet.throw(exception)
    except:  # pylint:disable=bare-except, undefined-variable
        # XXX do we need this here?
        get_my_hub(glet).handle_error(glet, *sys_exc_info())
    if waiter is not None:
        waiter.switch(None)
Example #9
def _kill(glet, exception, waiter):
    try:
        glet.throw(exception)
    except: # pylint:disable=bare-except
        # XXX do we need this here?
        glet.parent.handle_error(glet, *sys_exc_info())
    if waiter is not None:
        waiter.switch(None)
Example #10
def _kill(glet, exception, waiter):
    try:
        glet.throw(exception)
    except:  # pylint:disable=bare-except
        # XXX do we need this here?
        glet.parent.handle_error(glet, *sys_exc_info())
    if waiter is not None:
        waiter.switch(None)
Example #11
def _kill(glet, exception, waiter):
    try:
        glet.throw(exception)
    except: # pylint:disable=bare-except, undefined-variable
        # XXX do we need this here?
        get_my_hub(glet).handle_error(glet, *sys_exc_info())
    if waiter is not None:
        waiter.switch(None)
Example #12
def _killall3(greenlets, exception, waiter):
    diehards = []
    for g in greenlets:
        if not g.dead:
            try:
                g.throw(exception)
            except: # pylint:disable=bare-except
                g.parent.handle_error(g, *sys_exc_info())
            if not g.dead:
                diehards.append(g)
    waiter.switch(diehards)
Example #13
def _killall3(greenlets, exception, waiter):
    diehards = []
    for g in greenlets:
        if not g.dead:
            try:
                g.throw(exception)
            except:  # pylint:disable=bare-except
                g.parent.handle_error(g, *sys_exc_info())
            if not g.dead:
                diehards.append(g)
    waiter.switch(diehards)
Example #14
    def run(self):
        try:
            self.__cancel_start()
            self._start_event = _start_completed_event

            try:
                result = self._run(*self.args, **self.kwargs)
            except:  # pylint:disable=bare-except
                self.__report_error(sys_exc_info())
            else:
                self.__report_result(result)
        finally:
            self.__free()
Example #15
    def _notify_links(self):
        while self._links:
            # Early links are allowed to remove later links
            # before we get to them, and they're also allowed to
            # add new links, so we have to be careful about iterating.

            # We don't expect this list to be very large, so the time spent
            # manipulating it should be small. a deque is probably not justified.
            # Cython has optimizations to transform this into a memmove anyway.
            link = self._links.pop(0)
            try:
                link(self)
            except: # pylint:disable=bare-except
                self.parent.handle_error((link, self), *sys_exc_info())
Example #16
    def create_search_index(self, pid, name, description):
        project_id = str(pid)

        fields = [
            gsearch.TextField(name='project_name', value=name),
            gsearch.TextField(name='project_description', value=description)
        ]

        doc = gsearch.Document(doc_id=project_id, fields=fields)
        try:
            gsearch.Index('project_search').put(doc)
        except gsearch.Error:
            print "Document not indexed due to -> {0}".format(
                sys_exc_info()[0])
Example #17
    def _notify_links(self):
        while self._links:
            # Early links are allowed to remove later links
            # before we get to them, and they're also allowed to
            # add new links, so we have to be careful about iterating.

            # We don't expect this list to be very large, so the time spent
            # manipulating it should be small. a deque is probably not justified.
            # Cython has optimizations to transform this into a memmove anyway.
            link = self._links.pop(0)
            try:
                link(self)
            except:  # pylint:disable=bare-except
                self.parent.handle_error((link, self), *sys_exc_info())
Example #18
    def run(self):
        try:
            self.__cancel_start()
            self._start_event = _start_completed_event

            try:
                result = self._run(*self.args, **self.kwargs)
            except:  # pylint:disable=bare-except
                self._report_error(sys_exc_info())
                return
            self._report_result(result)
        finally:
            self.__dict__.pop('_run', None)
            self.args = ()
            self.kwargs.clear()
Example #19
    def run(self):
        try:
            self.__cancel_start()
            self._start_event = _start_completed_event

            try:
                result = self._run(*self.args, **self.kwargs)
            except: # pylint:disable=bare-except
                self._report_error(sys_exc_info())
                return
            self._report_result(result)
        finally:
            self.__dict__.pop('_run', None)
            self.args = ()
            self.kwargs.clear()
Example #20
    def run(self) -> None:
        if self.fail_count >= config.AGENT.device_fail_count:
            log(f"Thread {self.log_name} failed too often => entering fail-sleep of {config.AGENT.device_fail_sleep} secs",
                level=3)
            time_sleep(config.AGENT.device_fail_sleep)

        log(f"Entering runtime of thread {self.log_name}", level=7)
        try:
            if self.once:
                while not self.state_stop.wait(self.sleep.total_seconds()):
                    self.execute(data=self.data)
                    Loop.stop_thread(self.loop_instance,
                                     description=self.description)
                    break

            else:
                while not self.state_stop.wait(self.sleep.total_seconds()):
                    if self.state_stop.isSet():
                        log(f"Exiting thread {self.log_name}", level=5)
                        break

                    else:
                        log(f"Starting thread {self.log_name}", level=5)
                        self.execute(data=self.data)

        except (RuntimeError, ValueError, IndexError, KeyError, AttributeError,
                TypeError) as error_msg:
            self.fail_count += 1
            log(f"Thread {self.log_name} failed with error: \"{error_msg}\"",
                level=1)
            log(f"{format_exc(limit=config.LOG_MAX_TRACEBACK_LENGTH)}",
                level=4)

            if not self.once:
                self.run()

        except:
            self.fail_count += 1
            exc_type, exc_obj, _ = sys_exc_info()
            log(f"Thread {self.log_name} failed with error: \"{exc_type} - {exc_obj}\"",
                level=1)
            log(f"{format_exc(limit=config.LOG_MAX_TRACEBACK_LENGTH)}",
                level=4)

            if not self.once:
                self.run()
Example #21
    def _run(self):
        try:
            self._wait(seconds=config.SVC_WAIT_TIME)
            fns_log('Entering service runtime', level=7)
            run_last_reload_time = time()
            run_last_status_time = time()

            while True:
                if time() > (run_last_reload_time +
                             config.AGENT.svc_interval_reload):
                    self.reload()
                    break

                if time() > (run_last_status_time +
                             config.AGENT.svc_interval_status):
                    self._status()
                    run_last_status_time = time()

                time_sleep(config.SVC_LOOP_INTERVAL)

        except:
            try:
                exc_type, error, _ = sys_exc_info()

                if str(error).find('Service exited') == -1:
                    fns_log(
                        f"A fatal error occurred: \"{exc_type} - {error}\"")
                    fns_log(
                        f"{format_exc(limit=config.LOG_MAX_TRACEBACK_LENGTH)}")

            except IndexError:
                pass

            if self.exit_count > 0:
                fns_log('Skipping service stop (gracefully) -> exiting (hard)',
                        level=5)
                self._exit()

            else:
                self.stop()
Example #22
def server_thread(srv, connection, client):
    # handles client connections; did not work as a method of 'Server'.
    try:
        start_time = time()
        srv.CLIENT_THREADS += 1
        srv.LOG(f'{srv.LOG_PREFIX}Entered Client-Thread #{srv.CLIENT_THREADS}',
                level=6)
        data = Interact(link=connection, server=True, logger=srv.LOG).receive()
        srv.LOG(f"{srv.LOG_PREFIX}Received data: '{data}'", level=6)

        if data is None:
            srv.LOG(f'{srv.LOG_PREFIX}Unable to get route', level=6)
            Interact(link=connection, server=True, logger=srv.LOG).send(
                data=socket_config.NONE_RESULT
            )  # return none-result to client so it does not wait for a response that will never come

        else:
            # parsing command and executing api
            result = Route(parsed=data).go()
            Interact(
                link=connection, server=True,
                logger=srv.LOG).send(data=f"{data['path']}"
                                     f"{socket_config.PACKAGE_PATH_SEPARATOR}"
                                     f"{result}")

        srv.LOG(
            f"{srv.LOG_PREFIX}Processed client connection to '{client}' in %.3f secs"
            % (time() - start_time),
            level=6)
        srv.CLIENT_THREADS -= 1
        return data

    except:
        srv.CLIENT_THREADS -= 1
        exc_type, exc_obj, _ = sys_exc_info()
        srv.LOG(
            f"Client connection '{client}' failed with error: \"{exc_type} - {exc_obj}\"",
            level=1)
        srv.LOG(f"{format_exc(limit=config.LOG_MAX_TRACEBACK_LENGTH)}",
                level=4)
Example #23
                                move(f_in_PNE, f_out_PNE)
                                print('+', end='')
                        else:
                            if os_path.isfile(f_out_PNE):
                                print('_', end='')  # already renamed
                            else:
                                print('-', end='')  # no such file

        else:
            print('"done nothing"')
    print('OK>')
    try:
        pass
    except Exception as e:
        print('The end. There are error ', standard_error_info(e))

        import traceback, code
        from sys import exc_info as sys_exc_info

        tb = sys_exc_info()[2]  # type, value,
        traceback.print_exc()
        last_frame = lambda tb=tb: last_frame(tb.tb_next) if tb.tb_next else tb
        frame = last_frame().tb_frame
        ns = dict(frame.f_globals)
        ns.update(frame.f_locals)
        code.interact(local=ns)
    finally:
        if 'log' in cfg['program'].keys():
            f.close()
        print('Ok')
Example #24
def main(new_arg=None, **kwargs):
    """

    :param new_arg: list of strings, command line arguments
    :kwargs: dicts for each section: to overwrite values in them (overwrites even high-priority values; other values remain)
    Note: if new_arg=='<cfg_from_args>' returns cfg but it will be None if argument
     argv[1:] == '-h' or '-v' passed to this code
    argv[1] is cfgFile. It was used with cfg files:
        'csv2h5_nav_supervisor.ini'
        'csv2h5_IdrRedas.ini'
        'csv2h5_Idronaut.ini'

    :return:
    """
    global l

    cfg = cfg_from_args(my_argparser(), new_arg, **kwargs)
    if not cfg or not cfg['program'].get('return'):
        print('Can not initialise')
        return cfg
    elif cfg['program']['return'] == '<cfg_from_args>':  # to help testing
        return cfg

    l = init_logging(logging, None, cfg['program']['log'],
                     cfg['program']['verbose'])
    print('\n' + this_prog_basename(__file__), end=' started. ')
    try:
        cfg['in']['paths'], cfg['in']['nfiles'], cfg['in'][
            'path'] = init_file_names(**cfg['in'],
                                      b_interact=cfg['program']['b_interact'])
    except Ex_nothing_done as e:
        print(e.message)
        return ()

    bOld_FF00FF = False
    # if 'TermGrunt' in sys.argv[1] FF00FF' in str(cfg['in']['path']):  # 'TermGrunt.h5'  ? args.path.endswith ('bin'):
    #     bOld_FF00FF = True
    #     cfg['in'].update({
    #     'header': 'TERM',
    #     'dt_from_utc': timedelta(hours=-1),
    #     'fs': 1, 'b_time_fromtimestamp': True,
    #     'b_time_fromtimestamp_source': False})
    # else:  # 'Katran.h5'
    #     cfg['in'].update({
    #     'delimiter_hex': '000000E6',
    #     'header': 'P, Temp, Cond',
    #     'dt_from_utc': timedelta(hours=0),
    #     'fs': 10, 'b_time_fromtimestamp': False,
    #     'b_time_fromtimestamp_source': False})

    set_field_if_no(
        cfg['in'], 'dtype', 'uint{:d}'.format(2**(3 + np.searchsorted(
            2**np.array([3, 4, 5, 6, 7]) > np.array(
                8 * (cfg['in']['data_word_len'] - 1)), 1))))

    # Prepare specific format loading and writing
    set_field_if_no(cfg['in'], 'coltime', [])
    cfg['in'] = init_input_cols(cfg['in'])
    cfg['out']['names'] = np.array(cfg['in']['dtype'].names)[ \
        cfg['in']['cols_loaded_save_b']]
    cfg['out']['formats'] = [
        cfg['in']['dtype'].fields[n][0] for n in cfg['out']['names']
    ]
    cfg['out']['dtype'] = np.dtype({
        'formats': cfg['out']['formats'],
        'names': cfg['out']['names']
    })
    h5init(cfg['in'], cfg['out'])

    # cfg['Period'] = 1.0 / cfg['in']['fs']  # instead Second can use Milli / Micro / Nano:
    # cfg['pdPeriod'] = pd.to_timedelta(cfg['Period'], 's')
    # #pd.datetools.Second(cfg['Period'])\
    #     if 1 % cfg['in']['fs'] == 0 else\
    #     pd.datetools.Nano(cfg['Period'] * 1e9)

    # log table of loaded files. columns: Start time, file name, and its index in array of all loaded data:
    log_item = cfg['out']['log'] = {
    }  # fields will have: 'fileName': None, 'fileChangeTime': None, 'rows': 0

    strLog = ''
    # from collections import namedtuple
    # type_log_files = namedtuple('type_log_files', ['label','iStart'])
    # log.sort(axis=0, order='log_item['Date0']')#sort files by time

    dfLogOld, cfg['out']['db'], cfg['out'][
        'b_skip_if_up_to_date'] = h5temp_open(**cfg['out'])
    if 'log' in cfg['program'].keys():
        f = open(PurePath(sys_argv[0]).parent / cfg['program']['log'],
                 'a',
                 encoding='cp1251')
        f.writelines(
            datetime.now().strftime('\n\n%d.%m.%Y %H:%M:%S> processed ' +
                                    str(cfg['in']['nfiles']) + ' file' +
                                    ('s:' if cfg['in']['nfiles'] > 1 else ':')))
    b_remove_duplicates = False  # normally no duplicates but will if detect
    # Config specially for readBinFramed
    set_field_if_no(cfg['in'], 'b_byte_order_is_big_endian', True)
    set_field_if_no(cfg['in'], 'b_baklan', False)
    set_field_if_no(cfg['in'], 'b_time_fromtimestamp_source', False)
    cfg['out']['fs'] = cfg['in']['fs']
    if True:
        ## Main circle ############################################################
        for i1_file, path_in in h5_dispenser_and_names_gen(
                cfg['in'], cfg['out']):
            l.info('{}. {}: '.format(i1_file, path_in.name))

            # Loading data
            if bOld_FF00FF:
                V = readFF00FF(path_in, cfg)
                iFrame = np.arange(len(V))
            else:
                V, iFrame = readBinFramed(path_in, cfg['in'])
            if ('b_time_fromtimestamp' in cfg['in'] and cfg['in']['b_time_fromtimestamp']) or \
                    ('b_time_fromtimestamp_source' in cfg['in'] and cfg['in']['b_time_fromtimestamp_source']):
                path_in_rec = os_path.join(
                    'd:\\workData\\_source\\BalticSea\\151021_T1Grunt_Pregol\\_source\\not_corrected',
                    os_path.basename(path_in)[:-3] + 'txt'
                ) if cfg['in']['b_time_fromtimestamp_source'] else path_in
                log_item['Date0'] = datetime.fromtimestamp(
                    os_path.getmtime(path_in_rec))  # getctime is bad
                log_item['Date0'] -= iFrame[-1] * timedelta(
                    seconds=1 / cfg['in']['fs']
                )  # use for computer filestamp at end of recording
            else:
                log_item['Date0'] = datetime.strptime(
                    path_in.stem, cfg['in']['filename2timestart_format'])
            log_item['Date0'] += cfg['in']['dt_from_utc']
            tim = log_item['Date0'] + iFrame * timedelta(
                seconds=1 / cfg['in']['fs']
            )  # tim = pd.date_range(log_item['Date0'], periods=np.size(V, 0), freq=cfg['pdPeriod'])
            df = pd.DataFrame(
                V.view(dtype=cfg['out']['dtype']),  # np.uint16
                columns=cfg['out']['names'],
                index=tim)
            # pd.DataFrame(V, columns=cfg['out']['names'], dtype=cfg['out']['formats'], index=tim)
            if df.empty:  # log['rows']==0
                print('No data => skip file')
                continue

            df, tim = set_filterGlobal_minmax(df,
                                              cfg_filter=cfg['filter'],
                                              log=log_item,
                                              dict_to_save_last_time=cfg['in'])
            if log_item['rows_filtered']:
                print('filtered out {}, remains {}'.format(
                    log_item['rows_filtered'], log_item['rows']))
            if not log_item['rows']:
                l.warning('no data! => skip file')
                continue
            elif log_item['rows']:
                print(
                    '.', end=''
                )  # , divisions=d.divisions), divisions=pd.date_range(tim[0], tim[-1], freq='1D')
            else:
                l.warning('no data! => skip file')
                continue

            # Append to Store
            h5_append(cfg['out'], df.astype('int32'), log_item)

            if 'txt' in cfg['program'].keys():  # can be saved as text too
                np.savetxt(cfg['program']['txt'],
                           V,
                           delimiter='\t',
                           newline='\n',
                           header=cfg['in']['header'] + log_item['fileName'],
                           fmt='%d',
                           comments='')

    try:
        if b_remove_duplicates:
            for tblName in (cfg['out']['table'] +
                            cfg['out']['tableLog_names']):
                cfg['out']['db'][tblName].drop_duplicates(
                    keep='last', inplace=True)  # subset='fileName',?
        if len(strLog):
            print('Create index', end=', ')
            for tblName in (cfg['out']['table'] +
                            cfg['out']['tableLog_names']):
                cfg['out']['db'].create_table_index(tblName,
                                                    columns=['index'],
                                                    kind='full')
        else:
            print('done nothing')
    except Exception as e:
        l.exception('The end. There are error ')

        import traceback, code
        from sys import exc_info as sys_exc_info

        tb = sys_exc_info()[2]  # type, value,
        traceback.print_exc()
        last_frame = lambda tb=tb: last_frame(tb.tb_next) if tb.tb_next else tb
        frame = last_frame().tb_frame
        ns = dict(frame.f_globals)
        ns.update(frame.f_locals)
        code.interact(local=ns)
    # sort index if we have any processed data (needed because ``ptprepack`` does not close the hdf5 source if it does not find data)
    if cfg['in'].get('time_last'):
        failed_storages = h5move_tables(cfg['out'])
        print('Ok.', end=' ')
        h5index_sort(
            cfg['out'],
            out_storage_name=f"{cfg['out']['db_path'].stem}-resorted.h5",
            in_storages=failed_storages)
Example #25
def main(new_arg=None):
    """

    :param new_arg: returns cfg if new_arg=='<cfg_from_args>' but it will be None if argument
     argv[1:] == '-h' or '-v' passed to this code
    argv[1] is cfgFile. It was used with cfg files:
        'csv2h5_nav_supervisor.ini'
        'csv2h5_IdrRedas.ini'
        'csv2h5_Idronaut.ini'
    :return:
    """

    global l
    cfg = cfg_from_args(my_argparser(), new_arg)
    if not cfg or not cfg['program'].get('return'):
        print('Can not initialise')
        return cfg
    elif cfg['program']['return'] == '<cfg_from_args>':  # to help testing
        return cfg

    l = init_logging(logging, None, cfg['program']['log'],
                     cfg['program']['verbose'])
    print('\n' + this_prog_basename(__file__), end=' started. ')
    try:
        cfg['in']['paths'], cfg['in']['nfiles'], cfg['in'][
            'path'] = init_file_names(**{
                **cfg['in'], 'path': cfg['in']['db_path']
            },
                                      b_interact=cfg['program']['b_interact'])
        set_field_if_no(
            cfg['in'], 'tables_log', '{}/logFiles'
        )  # will be filled by each table from cfg['in']['tables']
        cfg['in']['query'] = query_time_range(**cfg['in'])
        set_field_if_no(cfg['out'], 'db_path', cfg['in']['db_path'])
        # cfg['out'] = init_file_names(cfg['out'], , path_field='db_path')
    except Ex_nothing_done as e:
        print(e.message)
        return ()

    # args = parser.parse_args()
    # args.verbose= args.verbose[0]
    # try:
    #     cfg= ini2dict(args.cfgFile)
    #     cfg['in']['cfgFile']= args.cfgFile
    # except IOError as e:
    #     print('\n==> '.join([a for a in e.args if isinstance(a,str)])) #e.message
    #     raise(e)
    # Open text log
    if 'log' in cfg['program'].keys():
        dir_create_if_need(os_path.dirname(cfg['program']['log']))
        flog = open(cfg['program']['log'], 'a+', encoding='cp1251')

    cfg['out']['log'] = OrderedDict({'fileName': None, 'fileChangeTime': None})

    # Prepare saving to csv
    if 'file_names_add_fun' in cfg['out']:
        file_names_add = eval(
            compile(cfg['out']['file_names_add_fun'], '', 'eval'))
    else:
        file_names_add = lambda i: '.csv'  # f'_{i}.csv'

    # Prepare data for output store and open it
    if cfg['out']['tables'] == ['None']:
        # will not write new data table and its log
        cfg['out']['tables'] = None
        # cfg['out']['tables_log'] = None  # for _runs cfg will be redefined (this only None case that have sense?)

    h5init(cfg['in'], cfg['out'])
    # store, dfLogOld = h5temp_open(**cfg['out'])

    cfg_fileN = os_path.splitext(cfg['in']['cfgFile'])[0]
    out_tables_log = cfg['out'].get('tables_log')
    if cfg_fileN.endswith('_runs') or (bool(out_tables_log)
                                       and 'logRuns' in out_tables_log[0]):

        # Will calculate only after filter  # todo: calculate derived parameters before, where they are bad (or replace all of them if any are bad?)
        func_before_cycle = lambda x: None
        func_before_filter = lambda df, log_row, cfg: df
        func_after_filter = lambda df, cfg: log_runs(df, cfg, cfg['out']['log']
                                                     )

        # this table will be added:
        cfg['out']['tables_log'] = [cfg['out']['tables'][0] + '/logRuns']
        cfg['out'][
            'b_log_ready'] = True  # to not update time range in h5_append()

        # Settings to not affect main data table and switch off not compatible options:
        cfg['out']['tables'] = []
        cfg['out'][
            'b_skip_if_up_to_date'] = False  # todo: If False check it: need delete all previous result of CTD_calc() or set min_time > its last log time. True not implemented?
        cfg['program'][
            'b_log_display'] = False  # can not display multiple rows log
        if 'b_save_images' in cfg['extract_runs']:
            cfg['extract_runs']['path_images'] = cfg['out'][
                'db_path'].with_name('_subproduct')
            dir_create_if_need(cfg['extract_runs']['path_images'])
    else:
        if 'brown' in cfg_fileN.lower():
            func_before_cycle = load_coef
            if 'Lat' in cfg['in']:
                func_before_filter = lambda *args, **kwargs: add_ctd_params(
                    process_brown(*args, **kwargs), kwargs['cfg'])
            else:
                func_before_filter = process_brown
        else:
            func_before_cycle = lambda x: None

            def ctd_coord_and_params(df: pd.DataFrame, log_row, cfg):
                coord_data_col_ensure(df, log_row)
                return add_ctd_params(df, cfg)

            func_before_filter = ctd_coord_and_params
        func_after_filter = lambda df, cfg: df  # nothing after filter

    func_before_cycle(cfg)  # prepare: usually assign data to cfg['for']
    if cfg['out'].get('path_csv'):
        dir_create_if_need(cfg['out']['path_csv'])
    # Load data Main circle #########################################
    # Open input store and cycle through input table log records
    qstr_trange_pattern = "index>=Timestamp('{}') & index<=Timestamp('{}')"
    iSt = 1

    dfLogOld, cfg['out']['db'], cfg['out'][
        'b_skip_if_up_to_date'] = h5temp_open(**cfg['out'])
    b_out_db_is_different = cfg['out']['db'] is not None and cfg['out'][
        'db_path_temp'] != cfg['in']['db_path']
    # Cycle for each table, for each row in log:
    # for path_csv in gen_names_and_log(cfg['out'], dfLogOld):
    with FakeContextIfOpen(
            lambda f: pd.HDFStore(f, mode='r'), cfg['in']['db_path'],
            None if b_out_db_is_different else cfg['out']['db']
    ) as cfg['in']['db']:  # not opens ['in']['db'] if already opened to write

        for tbl in cfg['in']['tables']:
            if False:  # Show table info
                nodes = sorted(
                    cfg['out']['db'].root.__members__)  # , key=number_key
                print(nodes)
            print(tbl, end='. ')

            df_log = cfg['in']['db'].select(cfg['in']['tables_log'].format(tbl)
                                            or tbl,
                                            where=cfg['in']['query'])
            if True:  # try:
                if 'log' in cfg['program'].keys():
                    nRows = df_log.rows.size
                    flog.writelines(datetime.now().strftime(
                        '\n\n%d.%m.%Y %H:%M:%S> processed ') + f'{nRows} row' +
                                    ('s:' if nRows > 1 else ':'))

                for ifile, r in enumerate(df_log.itertuples(),
                                          start=iSt):  # name=None
                    print('.', end='')
                    sys_stdout.flush()

                    path_raw = PurePath(r.fileName)
                    cfg['out']['log'].update(fileName=path_raw.name,
                                             fileChangeTime=r.fileChangeTime)
                    # save current state
                    cfg['in']['file_stem'] = cfg['out']['log'][
                        'fileName']  # for example, so a subprogram can extract the date
                    cfg['in']['fileChangeTime'] = cfg['out']['log'][
                        'fileChangeTime']

                    if cfg['in']['b_skip_if_up_to_date']:
                        have_older_data, have_duplicates = h5del_obsolete(
                            cfg['out'], cfg['out']['log'], dfLogOld)
                        if have_older_data:
                            continue
                        if have_duplicates:
                            cfg['out']['b_remove_duplicates'] = True
                    print('{}. {}'.format(ifile, path_raw.name), end=': ')

                    # Load data
                    qstr = qstr_trange_pattern.format(r.Index, r.DateEnd)
                    df_raw = cfg['in']['db'].select(tbl, qstr)
                    cols = df_raw.columns.tolist()

                    # cfg['in']['lat'] and ['lon'] may be need in add_ctd_params() if Lat not in df_raw
                    if 'Lat_en' in df_log.columns and 'Lat' not in cols:
                        cfg['in']['lat'] = np.nanmean((r.Lat_st, r.Lat_en))
                        cfg['in']['lon'] = np.nanmean((r.Lon_st, r.Lon_en))

                    df = func_before_filter(df_raw, log_row=r, cfg=cfg)

                    if df.size:  # size is zero means save only log but not data
                        # filter, updates cfg['out']['log']['rows']
                        df, _ = set_filterGlobal_minmax(
                            df, cfg['filter'], cfg['out']['log'])
                    if 'rows' not in cfg['out']['log']:
                        l.warning('no data!')
                        continue
                    elif isinstance(cfg['out']['log']['rows'], int):
                        print('filtered out {rows_filtered}, remains {rows}'.
                              format_map(cfg['out']['log']))
                        if cfg['out']['log']['rows']:
                            print('.', end='')
                        else:
                            l.warning('no data!')
                            continue

                    df = func_after_filter(df, cfg=cfg)

                    # Append to Store
                    h5_append(cfg['out'],
                              df,
                              cfg['out']['log'],
                              log_dt_from_utc=cfg['in']['dt_from_utc'])

                    # Copy to csv
                    if cfg['out'].get('path_csv'):
                        fname = '{:%y%m%d_%H%M}-{:%d_%H%M}'.format(
                            r.Index, r.DateEnd) + file_names_add(ifile)
                        if not 'data_columns' in cfg['out']:
                            cfg['out']['data_columns'] = slice(0,
                                                               -1)  # all cols
                        df.to_csv(  # [cfg['out']['data_columns']]
                            cfg['out']['path_csv'] / fname,
                            date_format=cfg['out']['text_date_format'],
                            float_format='%5.6g',
                            index_label='Time'
                        )  # to_string, line_terminator='\r\n'

                    # Log to screen (if not prohibited explicitly)
                    if cfg['out']['log'].get('Date0') is not None and (
                        ('b_log_display' not in cfg['program'])
                            or cfg['program']['b_log_display']):
                        str_log = '{fileName}:\t{Date0:%d.%m.%Y %H:%M:%S}-' \
                                  '{DateEnd:%d. %H:%M:%S%z}\t{rows}rows'.format_map(
                            cfg['out']['log'])  # \t{Lat}\t{Lon}\t{strOldVal}->\t{mag}
                        l.info(str_log)
                    else:
                        str_log = str(cfg['out']['log'].get('rows', '0'))
                    # Log to logfile
                    if 'log' in cfg['program'].keys():
                        flog.writelines('\n' + str_log)

    if b_out_db_is_different:
        try:
            if cfg['out']['tables'] is not None:
                print('')
                if cfg['out']['b_remove_duplicates']:
                    h5remove_duplicates(cfg['out'],
                                        cfg_table_keys=('tables',
                                                        'tables_log'))
                # Create full indexes. Must be done because of using ptprepack in h5move_tables() below
                l.debug('Create index')
                for tblName in (cfg['out']['tables'] +
                                cfg['out']['tables_log']):
                    try:
                        cfg['out']['db'].create_table_index(tblName,
                                                            columns=['index'],
                                                            kind='full')
                    except Exception as e:
                        l.warning(
                            ': table {}. Index not created - error'.format(
                                tblName), '\n==> '.join(
                                    [s for s in e.args if isinstance(s, str)]))
        except Exception as e:
            l.exception('The end. There are error ')

            import traceback, code
            from sys import exc_info as sys_exc_info
            tb = sys_exc_info()[2]  # type, value,
            traceback.print_exc()
            last_frame = lambda tb=tb: last_frame(tb.tb_next
                                                  ) if tb.tb_next else tb
            frame = last_frame().tb_frame
            ns = dict(frame.f_globals)
            ns.update(frame.f_locals)
            code.interact(local=ns)
        finally:

            cfg['out']['db'].close()
            if cfg['program']['log']:
                flog.close()
            if cfg['out']['db'].is_open:
                print('Wait store is closing...')
                sleep(2)

            failed_storages = h5move_tables(cfg['out'])
            print('Finishing...' if failed_storages else 'Ok.', end=' ')
            h5index_sort(
                cfg['out'],
                out_storage_name=f"{cfg['out']['db_path'].stem}-resorted.h5",
                in_storages=failed_storages)
Example #26
async def on_message(message):
    command = 'bracket' # Pre-setting the variable for use in the try/except block below (L78)
    # If the bot is the user, do not respond
    if message.author == client.user:
        return

    # If the bot is mentioned in a message, respond with a message informing it of being a bot
    if client.user.mentioned_in(message):
        # If @everyone or @here is used, ignore
        if "@everyone" in message.content or "@here" in message.content:
            return
        # Choose from a random response, then follow with a Bot message
        responses = ["Ok", "Thanks", "Sounds good to me", "Buff Rashid", "Buff Rashid", "Beep Boop", "Yes", "No", "Good to know", "Glad to hear it", "I'll keep that in mind", "The answer lies in the heart of battle", "Go home and be a family man"]
        await message.channel.send("{0} \n**I am a Bot that plays Rashid. Mentions cause my little Rashid brain to short circuit. Did you have ~~an eagle spi~~ a command?**".format(random_choice(responses)))
        return

    try:
        # Check if the channel is in the DB
        # Add it if it isn't
        if not settings_exist(message.guild.id, message.channel.id):
            raise Exception("Lizard-BOT failed to create DB entry for: " + message.guild.name + ". Guild ID: " + message.guild.id)

        # Get prefix for the guild
        prefix = read_db('guild', 'prefix-lizard', message.guild.id)

        # Check if the attempted_cmd is !prefix-lizard and has too many args
        if (message.content.split(' ')[0] == "!prefix-lizard" or message.content.split(' ')[0] == "!prefliz") and len(message.content.split()) > 1:
            await message.channel.send("Too many arguments. Check help-lizard for more info")
            return
        # Hardcode prefix command to be accessible via !
        elif message.content.split(' ')[0] == "!prefix-lizard" or message.content.split(' ')[0] == "!prefliz":
            response = await client.interface.call_command('prefix-lizard', 0, 0, 0, guild=message.guild.id)
            if response:
                await message.channel.send(response)
            return
        # If other commands don't start with the correct prefix, do nothing
        elif not message.content.startswith(prefix):
            return
        # Check if the attempted_cmd takes arguments
        elif message.content.split(' ')[0][1:].lower() in client.no_arg_cmds and len(message.content.split()) > 1:
            await message.channel.send("Too many arguments. Check help-lizard for more info")
            return

        # Rotate through commands to see if the message matches
        for command in client.commands:
            command = command.lower() # Lower the command for easier matching
            msg = message.content # The message
            attempted_cmd = msg.split(' ')[0][1:].lower() # Get the attempted command from the beginning of the string

            if attempted_cmd in ['challonge', 'chal', 'edit'] and len(msg.split(' ')) > 1:
                attempted_cmd += ' ' + msg.split(' ')[1].lower()

            # Check if the message begins with a command
            if attempted_cmd and attempted_cmd == command:
                user = message.author # The author
                kwargs = {'guild':message.guild.id}

                # Remove the command from the start
                msg = msg[len(command)+1:].strip()

                # Check command to see if we need keyword args
                if command in ['challonge checkin', 'chal checkin']:
                    kwargs['guild_members'] = message.guild.members
                elif command in ['edit botrole', 'edit role']:
                    kwargs['guild_default_role'] = message.guild.default_role
                    kwargs['role_mentions'] = message.role_mentions
                elif command in ['edit bracket', 'edit pingtest', 'edit status', 'edit seeding', 'edit stream', 'edit tos']:
                    kwargs['channel_mentions'] = message.channel_mentions
                    if command in ['edit tos']:
                        kwargs['mentions'] = message.mentions
                elif command in ['draw']:
                    kwargs['full_msg'] = message
                    kwargs['client'] = client

                # Await the interface calling the command
                response = await client.interface.call_command(command, msg, user, message.channel, **kwargs)
                # If there is a response, send it
                if response:
                    await message.channel.send(response)
                break
    except Exception:
        string_info = str(sys_exc_info()[1]) # Error message
        function_name = string_info.split(':')[0] # The command the error message came from

        # Expected error
        # Return friendly user message
        # Additional checks needed for challonge and edit commands that have multiple subcommands
        if client.interface._func_mapping[command].__name__ in function_name.strip("*").lower() or ('challonge' in client.interface._func_mapping[command].__name__ and 'challonge' in function_name.strip("*").lower()) or ('edit' in client.interface._func_mapping[command].__name__ and 'edit' in function_name.strip("*").lower()):
            await message.channel.send(function_name.replace('_', '-') + ': ' + ':'.join(string_info.split(':')[1:]))
        elif command == 'dev':
            # Return user message
            await message.channel.send(escape_markdown(traceback_format_exc()))
            # Print error to console
            traceback_print_exc()
        else:
            # Print error to console
            traceback_print_exc()
            # If we get this far and something breaks
            # Something is very wrong. Send user generic error message
            await message.channel.send("I is broken.\nBuff Rashid and submit an issue via <https://github.com/lizardman301/Lizard-bot-rsf/issues>\nOr just tell Lizardman301. That's what I do.")