Example No. 1
def _generate_serial(func, args_list, prog=True, verbose=True, nTasks=None,
                     quiet=QUIET, **kwargs):
    """ internal serial generator  """
    if nTasks is None:
        nTasks = len(args_list)
    if verbose and not quiet:
        print('[util_parallel._generate_serial] executing %d %s tasks in serial' %
                (nTasks, get_funcname(func)))
    prog = prog and verbose and nTasks > 1
    # Get iterator with or without progress
    verbose = verbose or not quiet
    lbl = '(sergen) %s: ' % (get_funcname(func),)
    args_iter = (
        util_progress.ProgressIter(args_list, nTotal=nTasks,
                                   lbl=lbl,
                                   freq=kwargs.get('freq', None),
                                   adjust=kwargs.get('adjust', False),
                                   verbose=verbose)
        if prog else args_list
    )
    if __TIME_GENERATE__:
        tt = util_time.tic('_generate_serial func=' + get_funcname(func))
    for args in args_iter:
        result = func(args)
        yield result
    if __TIME_GENERATE__:
        util_time.toc(tt)
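Every snippet in this listing calls get_funcname, a small helper from utool's internal meta_util_six module that reads a function's name in a Python-2/3 compatible way. A hedged sketch of what such a helper typically looks like (an assumption, not necessarily utool's exact implementation):

def get_funcname(func):
    # Prefer the Python 3 attribute and fall back to the old Python 2 one.
    # (Sketch only; mirrors the behavior these examples rely on.)
    try:
        return func.__name__
    except AttributeError:
        return func.func_name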
Example No. 2
def _generate_serial(func,
                     args_list,
                     prog=True,
                     verbose=True,
                     nTasks=None,
                     **kwargs):
    """ internal serial generator  """
    if nTasks is None:
        nTasks = len(args_list)
    if verbose:
        print(
            '[util_parallel._generate_serial] executing %d %s tasks in serial'
            % (nTasks, get_funcname(func)))
    prog = prog and verbose and nTasks > 1
    # Get iterator with or without progress
    lbl = '(sergen) %s: ' % (get_funcname(func), )
    args_iter = (util_progress.ProgressIter(args_list,
                                            nTotal=nTasks,
                                            lbl=lbl,
                                            freq=kwargs.get('freq', None),
                                            adjust=kwargs.get('adjust', False))
                 if prog else args_list)
    if __TIME_GENERATE__:
        tt = util_time.tic('_generate_serial func=' + get_funcname(func))
    for args in args_iter:
        result = func(args)
        yield result
    if __TIME_GENERATE__:
        util_time.toc(tt)
Example No. 3
def __argv_flag_dec(func, default=False, quiet=QUIET):
    flag = meta_util_six.get_funcname(func)
    if flag.find('no') == 0:
        flag = flag[2:]
    flag = '--' + flag.replace('_', '-')

    def GaurdWrapper(*args, **kwargs):
        # FIXME: the --print-all is a hack
        default_ = kwargs.pop('default', default)
        alias_flags = kwargs.pop('alias_flags', [])
        is_flagged = (get_argflag(flag, default_) or
                      get_argflag('--print-all') or
                      any([get_argflag(_) for _ in alias_flags]))
        if is_flagged:
            indent_lbl = flag.replace('--', '').replace('print-', '')
            print('')
            print('\n+ --- ' + indent_lbl + ' ___')
            #with util_print.Indenter('[%s]' % indent_lbl):
            ret = func(*args, **kwargs)
            print('L ___ ' + indent_lbl + '___\n')
            return ret
        else:
            if not quiet:
                print('\n~~~ %s ~~~' % flag)
    meta_util_six.set_funcname(GaurdWrapper, meta_util_six.get_funcname(func))
    return GaurdWrapper
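A minimal, hypothetical usage sketch for the flag decorator above; the function name and the derived command-line flag are invented, and it assumes the decorator plus its utool helpers (get_argflag, meta_util_six, QUIET) are importable:

# Hypothetical: the decorator derives '--dump-stats' from the function name,
# so the body only runs when that flag appears on the command line.
@__argv_flag_dec
def dump_stats():
    print('stats would be dumped here')

dump_stats()  # without --dump-stats this prints '~~~ --dump-stats ~~~'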
Example No. 4
def process(func, args_list, args_dict={}, force_serial=None,
            nTasks=None, quiet=QUIET):
    """
    Use ut.generate rather than ut.process

    Args:
        func (func):
        args_list (list or iter):
        args_dict (dict):
        force_serial (bool):

    Returns:
        result of parallel map(func, args_list)

    CommandLine:
        python -m utool.util_parallel --test-process

    Example:
        >>> # SLOW_DOCTEST
        >>> import utool as ut
        >>> num = 8700  # parallel is slower for smaller numbers
        >>> flag_generator0 = ut.process(ut.is_prime, list(zip(range(0, num))), force_serial=True)
        >>> flag_list0 = list(flag_generator0)
        >>> flag_generator1 = ut.process(ut.is_prime, list(zip(range(0, num))), force_serial=False)
        >>> flag_list1 = list(flag_generator1)
        >>> assert flag_list0 == flag_list1
    """
    if __FORCE_SERIAL__:
        force_serial = __FORCE_SERIAL__
    if FUTURE_ON:
        raise AssertionError('USE FUTURES')

    if USE_GLOBAL_POOL:
        ensure_pool(quiet=quiet)
    if nTasks is None:
        nTasks = len(args_list)
    if __POOL__ == 1 or force_serial:
        if not quiet:
            print('[util_parallel] executing %d %s tasks in serial' %
                  (nTasks, get_funcname(func)))
        result_list = _process_serial(func, args_list, args_dict, nTasks=nTasks,
                                      quiet=quiet)
    else:
        if __POOL__ is None:
            pool = new_pool(num_procs=get_default_numprocs(),
                            init_worker=init_worker,
                            maxtasksperchild=None)
        else:
            pool = __POOL__
        if not quiet:
            print('[util_parallel] executing %d %s tasks using %d processes' %
                  (nTasks, get_funcname(func), pool._processes))
        result_list = _process_parallel(func, args_list, args_dict, nTasks=nTasks,
                                        quiet=quiet, pool=pool)
    return result_list
Example No. 5
 def wrp_adder(*args, **kwargs):
     if DEBUG_ADDERS or VERB_CONTROL:
         print('+------')
         print('[ADD]: ' + get_funcname(func))
         funccall_str = ut.func_str(func, args, kwargs, packed=True)
         print('\n' + funccall_str + '\n')
         print('L------')
     if VERB_CONTROL:
         print('[ADD]: ' + get_funcname(func))
         builtins.print('\n' + ut.func_str(func, args, kwargs) + '\n')
     return func_(*args, **kwargs)
Example No. 6
 def wrp_adder(*args, **kwargs):
     if DEBUG_ADDERS or VERB_CONTROL:
         print('+------')
         print('[ADD]: ' + get_funcname(func))
         funccall_str = ut.func_str(func, args, kwargs, packed=True)
         print('\n' + funccall_str + '\n')
         print('L------')
     if VERB_CONTROL:
         print('[ADD]: ' + get_funcname(func))
         builtins.print('\n' + ut.func_str(func, args, kwargs) + '\n')
     return func_(*args, **kwargs)
Example No. 7
 def wrp_adder(*args, **kwargs):
     if DEBUG_ADDERS or VERB_CONTROL:
         print("+------")
         print("[ADD]: " + get_funcname(func))
         funccall_str = ut.func_str(func, args, kwargs, packed=True)
         print("\n" + funccall_str + "\n")
         print("L------")
     if VERB_CONTROL:
         print("[ADD]: " + get_funcname(func))
         builtins.print("\n" + ut.func_str(func, args, kwargs) + "\n")
     return func_(*args, **kwargs)
Example No. 8
def _process_parallel(func, args_list, args_dict={}, nTasks=None):
    """
    Parallel process map

    Use generate instead
    """
    # Define progress observers
    if nTasks is None:
        nTasks = len(args_list)
    num_tasks_returned_ptr = [0]
    mark_prog, end_prog = util_progress.progress_func(
        max_val=nTasks, lbl=get_funcname(func) + ': ')
    def _callback(result):
        mark_prog(num_tasks_returned_ptr[0])
        sys.stdout.flush()
        num_tasks_returned_ptr[0] += 1
    # Send all tasks to be executed asynchronously
    apply_results = [__POOL__.apply_async(func, args, args_dict, _callback)
                     for args in args_list]
    # Wait until all tasks have been processed
    while num_tasks_returned_ptr[0] < nTasks:
        #print('Waiting: ' + str(num_tasks_returned_ptr[0]) + '/' + str(nTasks))
        pass
    end_prog()
    # Get the results
    result_list = [ap.get() for ap in apply_results]
    if __EAGER_JOIN__:
        close_pool()
    return result_list
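The loop above busy-waits until every callback has fired. As a point of comparison only (not the library's code), the stdlib multiprocessing API lets you block on the AsyncResult objects instead of spinning; a self-contained sketch:

import multiprocessing

def _square(x):
    return x * x

if __name__ == '__main__':
    pool = multiprocessing.Pool(2)
    apply_results = [pool.apply_async(_square, (i,)) for i in range(8)]
    # Blocking on each AsyncResult replaces the busy-wait loop.
    result_list = [ap.get() for ap in apply_results]
    pool.close()
    pool.join()
    print(result_list)  # [0, 1, 4, 9, 16, 25, 36, 49]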
Example No. 9
 def profile_withfuncname_filter(func):
     # Test to see if this function is specified
     if _profile_func_flag(meta_util_six.get_funcname(func)):
         if __DEBUG_PROF__:
             print('profile func %r' % (func,))
         return PROFILE_FUNC(func)
     return func
Example No. 10
 def checkinfo_wrapper(*args, **kwargs):
     suggested_fix = ''
     funcname = get_funcname(func)
     packagename = funcname.replace('_version', '')
     pipname_ = pipname if pipname is not None else packagename
     try:
         infodict = func(*args, **kwargs)
     except ImportError as ex:
         infodict = module_stdinfo_dict(None, name=pipname_)
         suggested_fix = 'pip install ' + pipname_
         if not sys.platform.startswith('win32'):
             suggested_fix = 'sudo ' + suggested_fix
         return False, 'None', target, infodict, ut.formatex(ex), suggested_fix
     except Exception as ex:
         infodict = module_stdinfo_dict(None, name=pipname_)
         return False, 'None', target, infodict, ut.formatex(ex), 'Some unknown error in ' + packagename
     current_version = infodict['__version__']
     # Build status text
     msg = ut.dict_str(infodict, strvals=True)
     msg += '\n' + '%s: %r >= (target=%r)?' % (funcname, current_version, target)
     statustext = ut.msgblock(infodict['__name__'], msg)
     # Check if passed
     passed = version_ge_target(current_version, target)
     # Suggest possible fix
     if not passed:
         suggested_fix = 'pip install ' + infodict['__name__'] + ' --upgrade'
         if not sys.platform.startswith('win32'):
             suggested_fix = 'sudo ' + suggested_fix
     return passed, current_version, target, infodict, statustext, suggested_fix
Example No. 11
def _process_parallel(func, args_list, args_dict={}, nTasks=None, quiet=QUIET, pool=None):
    """
    Parallel process map

    Use generate instead
    """
    # Define progress observers
    if nTasks is None:
        nTasks = len(args_list)
    lbl = '(parproc) %s: ' % (get_funcname(func),)
    _prog = util_progress.ProgressIter(
        range(nTasks), nTotal=nTasks, lbl=lbl,
        adjust=True)
    _prog_iter = iter(_prog)
    num_tasks_returned_ptr = [0]
    def _callback(result):
        six.next(_prog_iter)
        num_tasks_returned_ptr[0] += 1
    # Send all tasks to be executed asynchronously
    apply_results = [pool.apply_async(func, args, args_dict, _callback)
                     for args in args_list]
    # Wait until all tasks have been processed
    while num_tasks_returned_ptr[0] < nTasks:
        #print('Waiting: ' + str(num_tasks_returned_ptr[0]) + '/' + str(nTasks))
        pass
    # Get the results
    result_list = [ap.get() for ap in apply_results]
    if __EAGER_JOIN__:
        if USE_GLOBAL_POOL:
            close_pool(quiet=quiet)
        else:
            pool.close()
            pool.join()
    return result_list
Example No. 12
 def profile_withfuncname_filter(func):
     # Test to see if this function is specified
     if _profile_func_flag(meta_util_six.get_funcname(func)):
         if __DEBUG_PROF__:
             print('profile func %r' % (func, ))
         return PROFILE_FUNC(func)
     return func
Example No. 13
 def wrp_interested(*args, **kwargs):
     sys.stdout.write('#\n')
     sys.stdout.write('#\n')
     sys.stdout.write(
         '<!INTERESTED>: ' + meta_util_six.get_funcname(func) + '\n')
     print('INTERESTING... ' + (' ' * 30) + ' <----')
     return func(*args, **kwargs)
Example No. 14
def __argv_flag_dec(func, default=False, quiet=QUIET, indent=False):
    """
    Logic for controlling if a function gets called based on command line
    """
    from utool import util_decor
    flagname = meta_util_six.get_funcname(func)
    if flagname.find('no') == 0:
        flagname = flagname[2:]

    flags = (
        '--' + flagname.replace('_', '-'),
        '--' + flagname,
    )

    @util_decor.ignores_exc_tb(outer_wrapper=False)
    def GaurdWrapper(*args, **kwargs):
        from utool import util_print
        # FIXME: the --print-all is a hack
        default_ = kwargs.pop('default', default)
        alias_flags = kwargs.pop('alias_flags', [])
        is_flagged = (get_argflag(flags, default_) or
                      get_argflag('--print-all') or
                      any([get_argflag(_) for _ in alias_flags]))
        if flagname in kwargs:
            is_flagged = kwargs.pop(flagname)
        if is_flagged:
            func_label = flags[0].replace('--', '').replace('print-', '')
            # print('')
            print('\n+ --- ' + func_label + ' ___')
            use_indent = indent is not False
            if indent is True:
                indent_ = '[%s]' % func_label
            else:
                indent_ = indent
            with util_print.Indenter(indent_, enabled=use_indent):
                ret = func(*args, **kwargs)
            print('L ___ ' + func_label + '___\n')
            return ret
        else:
            PRINT_DISABLED_FLAGDEC = not get_argflag(
                '--noinform', help_='does not print disabled flag decorators')
            if not quiet and PRINT_DISABLED_FLAGDEC:
                #print('\n~~~ %s ~~~' % flag)
                print('~~~ %s ~~~' % flags[0])
    meta_util_six.set_funcname(GaurdWrapper, meta_util_six.get_funcname(func))
    return GaurdWrapper
Example No. 15
def _inject_funcs(module, *func_list):
    for func in func_list:
        if (module is not None and hasattr(module, '__name__')
                and module.__name__ not in __INJECT_BLACKLIST__
                and not module.__name__.startswith('six')
                and not module.__name__.startswith('sys')):
            #print('setting: %s.%s = %r' % (module.__name__, meta_util_six.get_funcname(func), func))
            setattr(module, meta_util_six.get_funcname(func), func)
Example No. 16
def __argv_flag_dec(func, default=False, quiet=QUIET, indent=False):
    """
    Logic for controlling if a function gets called based on command line
    """
    from utool import util_decor
    flagname = meta_util_six.get_funcname(func)
    if flagname.find('no') == 0:
        flagname = flagname[2:]

    flags = (
        '--' + flagname.replace('_', '-'),
        '--' + flagname,
    )

    @util_decor.ignores_exc_tb(outer_wrapper=False)
    def GaurdWrapper(*args, **kwargs):
        from utool import util_print
        # FIXME: the --print-all is a hack
        default_ = kwargs.pop('default', default)
        alias_flags = kwargs.pop('alias_flags', [])
        is_flagged = (get_argflag(flags, default_)
                      or get_argflag('--print-all')
                      or any([get_argflag(_) for _ in alias_flags]))
        if is_flagged:
            func_label = flags[0].replace('--', '').replace('print-', '')
            print('')
            print('\n+ --- ' + func_label + ' ___')
            use_indent = indent is not False
            if indent is True:
                indent_ = '[%s]' % func_label
            else:
                indent_ = indent
            with util_print.Indenter(indent_, enabled=use_indent):
                ret = func(*args, **kwargs)
            print('L ___ ' + func_label + '___\n')
            return ret
        else:
            PRINT_DISABLED_FLAGDEC = not get_argflag(
                '--noinform', help_='does not print disabled flag decorators')
            if not quiet and PRINT_DISABLED_FLAGDEC:
                #print('\n~~~ %s ~~~' % flag)
                print('~~~ %s ~~~' % flags[0])

    meta_util_six.set_funcname(GaurdWrapper, meta_util_six.get_funcname(func))
    return GaurdWrapper
Example No. 17
 def checkqterr_wrapper(self, *args, **kwargs):
     try:
         result = func(self, *args, **kwargs)
     except Exception as ex:
         funcname = meta_util_six.get_funcname(func)
         msg = 'caught exception in %r' % (funcname,)
         ut.printex(ex, msg, tb=True, pad_stdout=True)
         raise
     return result
Example No. 18
 def checkqterr_wrapper(self, *args, **kwargs):
     try:
         result = func(self, *args, **kwargs)
     except Exception as ex:
         funcname = meta_util_six.get_funcname(func)
         msg = 'caught exception in %r' % (funcname,)
         ut.printex(ex, msg, tb=True, pad_stdout=True)
         raise
     return result
Example No. 19
def tracefunc(func):
    lbl = '[trace.' + meta_util_six.get_funcname(func) + ']'
    def wrp_tracefunc(*args, **kwargs):
        print(lbl + ' +--- ENTER ---')
        with util_print.Indenter(lbl + ' |'):
            ret = func(*args, **kwargs)
        print(lbl + ' L___ EXIT ____')
        return ret
    return wrp_tracefunc
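A short usage sketch for tracefunc; it assumes util_print.Indenter and meta_util_six.get_funcname from utool are importable, and the decorated function is invented:

@tracefunc
def add(a, b):
    print('adding %r and %r' % (a, b))
    return a + b

add(2, 3)
# Roughly expected output:
# [trace.add] +--- ENTER ---
# [trace.add] | adding 2 and 3
# [trace.add] L___ EXIT ____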
Example No. 20
def _inject_funcs(module, *func_list):
    for func in func_list:
        if (module is not None and
                hasattr(module, '__name__') and
                module.__name__ not in __INJECT_BLACKLIST__ and
                not module.__name__.startswith('six') and
                not module.__name__.startswith('sys')):
            #print('setting: %s.%s = %r' % (module.__name__, meta_util_six.get_funcname(func), func))
            setattr(module, meta_util_six.get_funcname(func), func)
Example No. 21
 def __new__(cls, name, bases, dct):
     """
     Args:
         cls (type): meta
         name (str): classname
         supers (list): bases
         dct (dict): class dictionary
     """
     method_list = get_comparison_methods()
     for func in method_list:
         if get_funcname(func) not in dct:
             funcname = get_funcname(func)
             dct[funcname] = func
         else:
             funcname = get_funcname(func)
             dct['meta_' + funcname] = func
         #ut.inject_func_as_method(metaself, func)
     return type.__new__(cls, name, bases, dct)
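The same injection pattern in a compact, self-contained sketch; _registered_methods and the injected functions are stand-ins for get_comparison_methods, not utool's actual registry:

def _registered_methods():
    # Stand-in registry: each function's __name__ picks the attribute it
    # is injected under.
    def __eq__(self, other):
        return self.key == other.key
    def __hash__(self):
        return hash(self.key)
    return [__eq__, __hash__]

class ComparableMeta(type):
    def __new__(cls, name, bases, dct):
        for func in _registered_methods():
            funcname = func.__name__
            if funcname not in dct:
                dct[funcname] = func            # inject missing method
            else:
                dct['meta_' + funcname] = func  # keep the class's own version
        return type.__new__(cls, name, bases, dct)

class Item(metaclass=ComparableMeta):
    def __init__(self, key):
        self.key = key

print(Item(1) == Item(1))  # True via the injected __eq__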
Example No. 22
 def __new__(cls, name, bases, dct):
     """
     Args:
         cls (type): meta
         name (str): classname
         supers (list): bases
         dct (dict): class dictionary
     """
     method_list = get_comparison_methods()
     for func in method_list:
         if get_funcname(func) not in dct:
             funcname = get_funcname(func)
             dct[funcname] = func
         else:
             funcname = get_funcname(func)
             dct['meta_' + funcname] = func
         #ut.inject_func_as_method(metaself, func)
     return type.__new__(cls, name, bases, dct)
Example No. 23
 def ex_wrapper(*args, **kwargs):
     try:
         return func(*args, **kwargs)
     except Exception as ex:
         import utool
         msg = ('[tools] ERROR: %s(%r, %r)' % (meta_util_six.get_funcname(func), args, kwargs))
         #print(msg)
         utool.printex(ex, msg)
         #print('[tools] ERROR: %r' % ex)
         raise
Example No. 24
def _generate_parallel(func, args_list, ordered=True, chunksize=1,
                       prog=True, verbose=True, nTasks=None, freq=None):
    """
    Parallel process generator
    """
    prog = prog and verbose
    if nTasks is None:
        nTasks = len(args_list)
    if chunksize is None:
        chunksize = max(1, nTasks // (__POOL__._processes ** 2))
    if verbose:
        prefix = '[util_parallel._generate_parallel]'
        fmtstr = prefix + 'executing %d %s tasks using %d processes with chunksize=%r'
        print(fmtstr % (nTasks, get_funcname(func), __POOL__._processes, chunksize))
    pmap_func = __POOL__.imap if ordered else __POOL__.imap_unordered
    raw_generator = pmap_func(func, args_list, chunksize)
    # Get iterator with or without progress
    result_generator = (
        util_progress.ProgressIter(raw_generator, nTotal=nTasks, lbl=get_funcname(func) + ': ', freq=freq)
        if prog else raw_generator
    )
    if __TIME_GENERATE__:
        tt = util_time.tic('_generate_parallel func=' + get_funcname(func))
    try:
        for result in result_generator:
            yield result
        if __EAGER_JOIN__:
            close_pool()
    except Exception as ex:
        util_dbg.printex(ex, 'Parallel Generation Failed!', '[utool]', tb=True)
        if __EAGER_JOIN__:
            close_pool()
        print('__SERIAL_FALLBACK__ = %r' % __SERIAL_FALLBACK__)
        if __SERIAL_FALLBACK__:
            print('Trying to handle error by falling back to serial')
            serial_generator = _generate_serial(
                func, args_list, prog=prog, verbose=verbose, nTasks=nTasks, freq=freq)
            for result in serial_generator:
                yield result
        else:
            raise
    if __TIME_GENERATE__:
        util_time.toc(tt)
Example No. 25
 def ex_wrapper(*args, **kwargs):
     try:
         return func(*args, **kwargs)
     except Exception as ex:
         import utool
         msg = ('[tools] ERROR: %s(%r, %r)' % (meta_util_six.get_funcname(func), args, kwargs))
         #print(msg)
         utool.printex(ex, msg)
         #print('[tools] ERROR: %r' % ex)
         raise
Example No. 26
 def pyqtSlotWrapper(func):
     #printDBG('[GUITOOL._SLOT] Wrapping: %r' % func.__name__)
     funcname = meta_util_six.get_funcname(func)
     @QtCore.pyqtSlot(*types, name=funcname)
     @ut.ignores_exc_tb
     def slot_wrapper(self, *args, **kwargs):
         result = func(self, *args, **kwargs)
         return result
     slot_wrapper = functools.update_wrapper(slot_wrapper, func)
     return slot_wrapper
Example No. 27
 def __new__(cls, name, bases, dct):
     """
     cls - meta
     name - classname
     supers - bases
     dct - class dictionary
     """
     #assert 'get_cfgstr_list' in dct, (
     #  'must have defined get_cfgstr_list.  name=%r' % (name,))
     # Inject registered function
     for func in methods_list:
         if get_funcname(func) not in dct:
             funcname = get_funcname(func)
             dct[funcname] = func
         else:
             funcname = get_funcname(func)
             dct['meta_' + funcname] = func
         #ut.inject_func_as_method(metaself, func)
     return type.__new__(cls, name, bases, dct)
Example No. 28
def process(func, args_list, args_dict={}, force_serial=__FORCE_SERIAL__,
            nTasks=None):
    """
    Use ut.generate rather than ut.process

    Args:
        func (func):
        args_list (list or iter):
        args_dict (dict):
        force_serial (bool):

    Returns:
        result of parallel map(func, args_list)

    CommandLine:
        python -m utool.util_parallel --test-process

    Example:
        >>> # SLOW_DOCTEST
        >>> import utool as ut
        >>> num = 8700  # parallel is slower for smaller numbers
        >>> flag_generator0 = ut.process(ut.is_prime, zip(range(0, num)), force_serial=True)
        >>> flag_list0 = list(flag_generator0)
        >>> flag_generator1 = ut.process(ut.is_prime, zip(range(0, num)), force_serial=False)
        >>> flag_list1 = list(flag_generator1)
        >>> assert flag_list0 == flag_list1
    """

    ensure_pool()
    if nTasks is None:
        nTasks = len(args_list)
    if __POOL__ == 1 or force_serial:
        if not QUIET:
            print('[util_parallel] executing %d %s tasks in serial' %
                  (nTasks, get_funcname(func)))
        result_list = _process_serial(func, args_list, args_dict, nTasks=nTasks)
    else:
        if not QUIET:
            print('[util_parallel] executing %d %s tasks using %d processes' %
                  (nTasks, get_funcname(func), __POOL__._processes))
        result_list = _process_parallel(func, args_list, args_dict, nTasks=nTasks)
    return result_list
Example No. 29
 def wrp_setter(*args, **kwargs):
     if DEBUG_SETTERS or VERB_CONTROL:
         print('+------')
         print('[SET]: ' + get_funcname(func))
         print('[SET]: called by: ' + ut.get_caller_name(range(1, 7)))
         funccall_str = ut.func_str(func, args, kwargs, packed=True)
         print('\n' + funccall_str + '\n')
         print('L------')
         #builtins.print('\n' + funccall_str + '\n')
     #print('set: funcname=%r, args=%r, kwargs=%r' % (get_funcname(func), args, kwargs))
     return func_(*args, **kwargs)
Example No. 30
 def wrp_setter(*args, **kwargs):
     if DEBUG_SETTERS or VERB_CONTROL:
         print("+------")
         print("[SET]: " + get_funcname(func))
         print("[SET]: called by: " + ut.get_caller_name(range(1, 7)))
         funccall_str = ut.func_str(func, args, kwargs, packed=True)
         print("\n" + funccall_str + "\n")
         print("L------")
         # builtins.print('\n' + funccall_str + '\n')
     # print('set: funcname=%r, args=%r, kwargs=%r' % (get_funcname(func), args, kwargs))
     return func_(*args, **kwargs)
Example No. 31
 def wrp_getter(*args, **kwargs):
     #if ut.DEBUG:
     #    print('[IN GETTER] args=%r' % (args,))
     #    print('[IN GETTER] kwargs=%r' % (kwargs,))
     if DEBUG_GETTERS  or VERB_CONTROL:
         print('+------')
         print('[GET]: ' + get_funcname(func))
         funccall_str = ut.func_str(func, args, kwargs, packed=True)
         print('\n' + funccall_str + '\n')
         print('L------')
     return func_(*args, **kwargs)
Example No. 32
 def wrp_getter(*args, **kwargs):
     # if ut.DEBUG:
     #    print('[IN GETTER] args=%r' % (args,))
     #    print('[IN GETTER] kwargs=%r' % (kwargs,))
     if DEBUG_GETTERS or VERB_CONTROL:
         print("+------")
         print("[GET]: " + get_funcname(func))
         funccall_str = ut.func_str(func, args, kwargs, packed=True)
         print("\n" + funccall_str + "\n")
         print("L------")
     return func_(*args, **kwargs)
Example No. 33
def _generate_serial(func, args_list, prog=True, verbose=True, nTasks=None, freq=None):
    """ internal serial generator  """
    if nTasks is None:
        nTasks = len(args_list)
    if verbose:
        print('[util_parallel._generate_serial] executing %d %s tasks in serial' %
                (nTasks, get_funcname(func)))
    prog = prog and verbose and nTasks > 1
    # Get iterator with or without progress
    args_iter = (
        util_progress.ProgressIter(args_list, nTotal=nTasks, lbl=get_funcname(func) + ': ', freq=freq)
        if prog else args_list
    )
    if __TIME_GENERATE__:
        tt = util_time.tic('_generate_serial func=' + get_funcname(func))
    for args in args_iter:
        result = func(args)
        yield result
    if __TIME_GENERATE__:
        util_time.toc(tt)
Example No. 34
def inject_func_as_method(self, func, method_name=None, class_=None, allow_override=False, allow_main=False):
    """ Injects a function into an object as a method

    Wraps func as a bound method of self. Then injects func into self
    It is preferable to use make_class_method_decorator and inject_instance

    Args:
       self (object): class instance
       func : some function whose first argument is a class instance
       method_name (str) : default=func.__name__, if specified renames the method
       class_ (type) : if func is an unbound method of this class


    References:
        http://stackoverflow.com/questions/1015307/python-bind-an-unbound-method
    """
    if method_name is None:
        method_name = get_funcname(func)
    #printDBG('Injecting method_name=%r' % method_name)
    old_method = getattr(self, method_name, None)
    #import utool as ut
    #ut.embed()

    # Bind function to the class instance
    #new_method = types.MethodType(func, self, self.__class__)
    new_method = func.__get__(self, self.__class__)
    #new_method = profile(func.__get__(self, self.__class__))

    if old_method is not None:
        if not allow_main and (
                old_method.im_func.func_globals['__name__'] != '__main__' and
                new_method.im_func.func_globals['__name__'] == '__main__'):
            if True or VERBOSE_CLASS:
                print('[util_class] skipping re-inject of %r from __main__' % method_name)
            return
        if old_method is new_method or old_method.im_func is new_method.im_func:
            print('WARNING: Injecting the same function twice: %r' % new_method)
        elif allow_override is False:
            raise AssertionError('Overrides are not allowed. Already have method_name=%r' % (method_name))
        elif allow_override == 'warn':
            print('WARNING: Overrides are not allowed. Already have method_name=%r. Skipping' % (method_name))
            return
        elif allow_override == 'override+warn':
            #import utool as ut
            #ut.embed()
            print('WARNING: Overrides are allowed, but dangerous. method_name=%r.' % (method_name))
            print('old_method = %r, im_func=%s' % (old_method, str(old_method.im_func)))
            print('new_method = %r, im_func=%s' % (new_method, str(new_method.im_func)))
            print(old_method.im_func.func_globals['__name__'])
            print(new_method.im_func.func_globals['__name__'])
        # TODO: does this actually decrement the refcount enough?
        del old_method
    setattr(self, method_name, new_method)
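At its core the injection above attaches a plain function to an instance by binding it with the descriptor protocol; a self-contained sketch of just that step, with invented class and function names:

class Bundle(object):
    pass

def describe(self):
    # Plain function whose first argument plays the role of 'self'.
    return 'Bundle with %d attribute(s)' % (len(vars(self)),)

bundle = Bundle()
# The same binding trick inject_func_as_method uses internally:
bundle.describe = describe.__get__(bundle, Bundle)
print(bundle.describe())  # -> 'Bundle with 1 attribute(s)'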
Example No. 35
    def pyqtSlotWrapper(func):
        # printDBG('[GUITOOL._SLOT] Wrapping: %r' % func.__name__)
        funcname = meta_util_six.get_funcname(func)

        @QtCore.pyqtSlot(*types, name=funcname)
        @ut.ignores_exc_tb
        def slot_wrapper(self, *args, **kwargs):
            result = func(self, *args, **kwargs)
            return result

        slot_wrapper = functools.update_wrapper(slot_wrapper, func)
        return slot_wrapper
Example No. 36
def inject_func_as_property(self, func, method_name=None, class_=None):
    """
    WARNING:
        properties are more safely injected using metaclasses

    References:
        http://stackoverflow.com/questions/13850114/dynamically-adding-methods-with-or-without-metaclass-in-python
    """
    if method_name is None:
        method_name = get_funcname(func)
    #new_method = func.__get__(self, self.__class__)
    new_property = property(func)
    setattr(self.__class__, method_name, new_property)
Example No. 37
def inject_func_as_property(self, func, method_name=None, class_=None,
                            allow_override=False, allow_main=False,
                            verbose=True):
    """
    WARNING:
        properties are more safely injected using metaclasses

    References:
        http://stackoverflow.com/questions/13850114/dynamically-adding-methods-with-or-without-metaclass-in-python
    """
    if method_name is None:
        method_name = get_funcname(func)
    #new_method = func.__get__(self, self.__class__)
    new_property = property(func)
    setattr(self.__class__, method_name, new_property)
Example No. 38
 def wrp_onexceptreport(*args, **kwargs):
     try:
         #import utool
         #if utool.DEBUG:
         #    print('[IN EXCPRPT] args=%r' % (args,))
         #    print('[IN EXCPRPT] kwargs=%r' % (kwargs,))
         return func(*args, **kwargs)
     except Exception as ex:
         from utool import util_str
         arg_strs = ', '.join([repr(util_str.truncate_str(str(arg))) for arg in args])
         kwarg_strs = ', '.join([util_str.truncate_str('%s=%r' % (key, val)) for key, val in six.iteritems(kwargs)])
         msg = ('\nERROR: funcname=%r,\n * args=%s,\n * kwargs=%r\n' % (meta_util_six.get_funcname(func), arg_strs, kwarg_strs))
         msg += ' * len(args) = %r\n' % len(args)
         msg += ' * len(kwargs) = %r\n' % len(kwargs)
         util_dbg.printex(ex, msg, pad_stdout=True)
         raise
Example No. 39
def indent_func(input_):
    """
    Takes either no arguments or an alias label
    """
    if isinstance(input_, six.string_types):
        # A label was specified
        lbl = input_
        return _indent_decor(lbl)
    elif isinstance(input_, (bool, tuple)):
        # Allow individually turning off this decorator
        func = input_
        return func
    else:
        # Use the function name as the label
        func = input_
        lbl = '[' + meta_util_six.get_funcname(func) + ']'
        return _indent_decor(lbl)(func)
Example No. 40
def _process_serial(func, args_list, args_dict={}, nTasks=None, quiet=QUIET):
    """
    Serial process map

    Use generate instead
    """
    if nTasks is None:
        nTasks = len(args_list)
    result_list = []
    lbl = '(serproc) %s: ' % (get_funcname(func),)
    prog_iter = util_progress.ProgressIter(
        args_list, nTotal=nTasks, lbl=lbl, adjust=True)
    # Execute each task sequentially
    for args in prog_iter:
        result = func(*args, **args_dict)
        result_list.append(result)
    return result_list
Example No. 41
def tracefunc_xml(func):
    """
    Causes output of function to be printed in an XML style block
    """
    funcname = meta_util_six.get_funcname(func)
    def wrp_tracefunc2(*args, **kwargs):
        verbose = kwargs.get('verbose', True)
        if verbose:
            print('<%s>' % (funcname,))
        with util_print.Indenter('    '):
            ret = func(*args, **kwargs)
        if verbose:
            print('</%s>' % (funcname,))
        return ret
    wrp_tracefunc2_ = ignores_exc_tb(wrp_tracefunc2)
    wrp_tracefunc2_ = preserve_sig(wrp_tracefunc2_, func)
    return wrp_tracefunc2_
Example No. 42
def _run_benchmark(setup_, func_list, argstr, number=1000):
    import timeit
    setup = GLOBAL_SETUP + '\n' + setup_
    print('----------')
    print('BENCHMARK: ' + utool.get_caller_name())
    for func in func_list:
        funcname = func if isinstance(func, str) else get_funcname(func)
        print('Running: %s' % funcname)
        stmt = funcname + argstr
        try:
            total_time = timeit.timeit(stmt=stmt, setup=setup, number=number)
        except ImportError as ex:
            utool.printex(ex, iswarning=True)
        except Exception as ex:
            utool.printex(ex, iswarning=False)
            raise
        print(' * timed: %r seconds in %s' % (total_time, funcname))
    return locals()
Example No. 43
def _process_parallel(func,
                      args_list,
                      args_dict={},
                      nTasks=None,
                      quiet=QUIET,
                      pool=None):
    """
    Parallel process map

    Use generate instead
    """
    # Define progress observers
    if nTasks is None:
        nTasks = len(args_list)
    lbl = '(parproc) %s: ' % (get_funcname(func), )
    _prog = util_progress.ProgressIter(range(nTasks),
                                       nTotal=nTasks,
                                       lbl=lbl,
                                       adjust=True)
    _prog_iter = iter(_prog)
    num_tasks_returned_ptr = [0]

    def _callback(result):
        six.next(_prog_iter)
        num_tasks_returned_ptr[0] += 1

    # Send all tasks to be executed asynchronously
    apply_results = [
        pool.apply_async(func, args, args_dict, _callback)
        for args in args_list
    ]
    # Wait until all tasks have been processed
    while num_tasks_returned_ptr[0] < nTasks:
        #print('Waiting: ' + str(num_tasks_returned_ptr[0]) + '/' + str(nTasks))
        pass
    # Get the results
    result_list = [ap.get() for ap in apply_results]
    if __EAGER_JOIN__:
        if USE_GLOBAL_POOL:
            close_pool(quiet=quiet)
        else:
            pool.close()
            pool.join()
    return result_list
Example No. 44
def _process_serial(func, args_list, args_dict={}, nTasks=None, quiet=QUIET):
    """
    Serial process map

    Use generate instead
    """
    if nTasks is None:
        nTasks = len(args_list)
    result_list = []
    lbl = '(serproc) %s: ' % (get_funcname(func), )
    prog_iter = util_progress.ProgressIter(args_list,
                                           nTotal=nTasks,
                                           lbl=lbl,
                                           adjust=True)
    # Execute each task sequentially
    for args in prog_iter:
        result = func(*args, **args_dict)
        result_list.append(result)
    return result_list
Example No. 45
def _run_benchmark(setup_, func_list, argstr, number=1000):
    import timeit

    setup = GLOBAL_SETUP + "\n" + setup_
    print("----------")
    print("BENCHMARK: " + utool.get_caller_name())
    for func in func_list:
        funcname = func if isinstance(func, str) else get_funcname(func)
        print("Running: %s" % funcname)
        stmt = funcname + argstr
        try:
            total_time = timeit.timeit(stmt=stmt, setup=setup, number=number)
        except ImportError as ex:
            utool.printex(ex, iswarning=True)
        except Exception as ex:
            utool.printex(ex, iswarning=False)
            raise
        print(" * timed: %r seconds in %s" % (total_time, funcname))
    return locals()
Example No. 46
def execstr_func(func):
    print(' ! Getting executable source for: ' + meta_util_six.get_funcname(func))
    _src = inspect.getsource(func)
    execstr = textwrap.dedent(_src[_src.find(':') + 1:])
    # Remove return statements
    while True:
        stmtx = execstr.find('return')  # Find first 'return'
        if stmtx == -1:
            break  # Fail condition
        # The characters which might make a return not have its own line
        stmt_endx = len(execstr) - 1
        for stmt_break in '\n;':
            print(execstr)
            print('')
            print(stmtx)
            stmt_endx_new = execstr[stmtx:].find(stmt_break)
            if -1 < stmt_endx_new < stmt_endx:
                stmt_endx = stmt_endx_new
        # now have variables stmt_x, stmt_endx
        before = execstr[:stmtx]
        after  = execstr[stmt_endx:]
        execstr = before + after
    return execstr
Example No. 47
 def make_redirect(func):
     # PRESERVES ALL SIGNATURES WITH EXECS
     src_fmt = r'''
     def {funcname}{defsig}:
         """ {orig_docstr}"""
         return {orig_funcname}{callsig}
     '''
     from utool._internal import meta_util_six
     orig_docstr = meta_util_six.get_funcdoc(func)
     funcname = meta_util_six.get_funcname(func)
     orig_funcname = modname.split('.')[-1] + '.' + funcname
     orig_docstr = '' if orig_docstr is None else orig_docstr
     import textwrap
     # Put wrapped function into a scope
     import inspect
     argspec = inspect.getargspec(func)
     (args, varargs, varkw, defaults) = argspec
     defsig = inspect.formatargspec(*argspec)
     callsig = inspect.formatargspec(*argspec[0:3])
     src_fmtdict = dict(funcname=funcname, orig_funcname=orig_funcname,
                        defsig=defsig, callsig=callsig,
                        orig_docstr=orig_docstr)
     src = textwrap.dedent(src_fmt).format(**src_fmtdict)
     return src
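Note that inspect.getargspec and inspect.formatargspec, used above, were removed in Python 3.11. A rough, hypothetical equivalent of the two signature strings built here can be derived from inspect.signature (keyword-only parameters are not handled, matching the old getargspec limitation):

import inspect

def format_signatures(func):
    # Sketch: defsig reproduces the def-style signature; callsig forwards
    # positional, *args and **kwargs parameters by name.
    sig = inspect.signature(func)
    defsig = str(sig)  # e.g. '(a, b=1, *args, **kwargs)'
    parts = []
    for param in sig.parameters.values():
        if param.kind is inspect.Parameter.VAR_POSITIONAL:
            parts.append('*' + param.name)
        elif param.kind is inspect.Parameter.VAR_KEYWORD:
            parts.append('**' + param.name)
        else:
            parts.append(param.name)
    callsig = '(%s)' % ', '.join(parts)
    return defsig, callsig

def sample(a, b=1, *args, **kwargs):
    pass

print(format_signatures(sample))
# ('(a, b=1, *args, **kwargs)', '(a, b, *args, **kwargs)')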
Example No. 48
def inject_func_as_method(self, func, method_name=None, class_=None,
                          allow_override=False, allow_main=False,
                          verbose=True, override=None, force=False):
    """ Injects a function into an object as a method

    Wraps func as a bound method of self. Then injects func into self
    It is preferable to use make_class_method_decorator and inject_instance

    Args:
       self (object): class instance
       func : some function whose first argument is a class instance
       method_name (str) : default=func.__name__, if specified renames the method
       class_ (type) : if func is an unbound method of this class


    References:
        http://stackoverflow.com/questions/1015307/python-bind-an-unbound-method
    """
    if override is not None:
        # TODO: deprecate allow_override
        allow_override = override
    if method_name is None:
        method_name = get_funcname(func)
    if force:
        allow_override = True
        allow_main = True
    old_method = getattr(self, method_name, None)
    # Bind function to the class instance
    #new_method = types.MethodType(func, self, self.__class__)
    new_method = func.__get__(self, self.__class__)
    #new_method = profile(func.__get__(self, self.__class__))

    if old_method is not None:
        old_im_func = get_method_func(old_method)
        new_im_func = get_method_func(new_method)
        if not allow_main and old_im_func is not None and (
                get_funcglobals(old_im_func)['__name__'] != '__main__' and
                get_funcglobals(new_im_func)['__name__'] == '__main__'):
            if True or VERBOSE_CLASS:
                print('[util_class] skipping re-inject of %r from __main__' % method_name)
            return
        if old_method is new_method or old_im_func is new_im_func:
            #if verbose and util_arg.NOT_QUIET:
            #    print('WARNING: Skipping injecting the same function twice: %r' % new_method)
                #print('WARNING: Injecting the same function twice: %r' % new_method)
            return
        elif allow_override is False:
            raise AssertionError(
                'Overrides are not allowed. Already have method_name=%r' %
                (method_name))
        elif allow_override == 'warn':
            print(
                'WARNING: Overrides are not allowed. Already have method_name=%r. Skipping' %
                (method_name))
            return
        elif allow_override == 'override+warn':
            #import utool as ut
            #ut.embed()
            print('WARNING: Overrides are allowed, but dangerous. method_name=%r.' %
                  (method_name))
            print('old_method = %r, im_func=%s' % (old_method, str(old_im_func)))
            print('new_method = %r, im_func=%s' % (new_method, str(new_im_func)))
            print(get_funcglobals(old_im_func)['__name__'])
            print(get_funcglobals(new_im_func)['__name__'])
        # TODO: does this actually decrement the refcount enough?
        del old_method
    setattr(self, method_name, new_method)
Example No. 49
def _generate_parallel(func,
                       args_list,
                       ordered=True,
                       chunksize=None,
                       prog=True,
                       verbose=True,
                       quiet=QUIET,
                       nTasks=None,
                       **kwargs):
    """
    Parallel process generator
    """
    global __POOL__
    if USE_GLOBAL_POOL:
        global __POOL__
        pool = __POOL__
    else:
        # Maybe global pools are bad?
        pool = new_pool(num_procs=get_default_numprocs(),
                        init_worker=init_worker,
                        maxtasksperchild=None)
        #pool = new_pool()

    prog = prog and verbose
    if nTasks is None:
        nTasks = len(args_list)
    if chunksize is None:
        chunksize = max(min(4, nTasks), min(8, nTasks // (pool._processes**2)))
    if verbose or VERBOSE_PARALLEL:
        prefix = '[util_parallel._generate_parallel]'
        fmtstr = (prefix +
                  'executing %d %s tasks using %d processes with chunksize=%r')
        print(fmtstr %
              (nTasks, get_funcname(func), pool._processes, chunksize))

    #import utool as ut
    #buffered = ut.get_argflag('--buffered')
    #buffered = False
    #if buffered:
    #    # current tests indicate that normal pool.imap is faster than buffered
    #    # generation
    #    source_gen = (func(args) for args in args_list)
    #    raw_generator = buffered_generator(source_gen)
    #else:
    pmap_func = pool.imap if ordered else pool.imap_unordered
    raw_generator = pmap_func(func, args_list, chunksize)

    # Get iterator with or without progress
    if prog:
        lbl = '(pargen) %s: ' % (get_funcname(func), )
        result_generator = util_progress.ProgressIter(
            raw_generator,
            nTotal=nTasks,
            lbl=lbl,
            freq=kwargs.get('freq', None),
            backspace=kwargs.get('backspace', True),
            adjust=kwargs.get('adjust', False))

    else:
        result_generator = raw_generator

    if __TIME_GENERATE__:
        tt = util_time.tic('_generate_parallel func=' + get_funcname(func))
    try:
        # Start generating
        for result in result_generator:
            yield result
        if __EAGER_JOIN__:
            if USE_GLOBAL_POOL:
                close_pool(quiet=quiet)
            else:
                pool.close()
                pool.join()
    except Exception as ex:
        util_dbg.printex(ex, 'Parallel Generation Failed!', '[utool]', tb=True)
        if __EAGER_JOIN__:
            if USE_GLOBAL_POOL:
                close_pool(quiet=quiet)
            else:
                pool.close()
                pool.join()
        print('__SERIAL_FALLBACK__ = %r' % __SERIAL_FALLBACK__)
        if __SERIAL_FALLBACK__:
            print('Trying to handle error by falling back to serial')
            serial_generator = _generate_serial(func,
                                                args_list,
                                                prog=prog,
                                                verbose=verbose,
                                                nTasks=nTasks,
                                                **kwargs)
            for result in serial_generator:
                yield result
        else:
            raise
    if __TIME_GENERATE__:
        util_time.toc(tt)
Example No. 50
def assert_modules():
    """
    checkinfo functions return info_dict
    checkinfo_func

    CommandLine:
        python -m ibeis.tests.assert_modules --test-assert_modules

    Example:
        >>> # DOCTEST_ENABLE
        >>> from ibeis.tests.assert_modules import *   # NOQA
        >>> detailed_msg = assert_modules()
        >>> print(detailed_msg)
    """

    MACHINE_NAME = ut.get_computer_name()

    machine_info_lines = []

    machine_info_lines.append('sys.version = %r ' % (sys.version))
    machine_info_lines.append('PATH = ' + ut.list_str(ut.get_path_dirs()))
    machine_info_lines.append('\n\n\n============================')
    machine_info_lines.append('Begining assert modules main')
    machine_info_lines.append('* MACHINE_NAME = %r' % MACHINE_NAME)
    machine_info_text = '\n'.join(machine_info_lines)
    print(machine_info_text)

    statustext_list = []
    failed_list = []
    fix_list = []

    SHOW_STATUS = not ut.get_argflag(('--nostatus', '--nostat'))

    for checkinfo_wrapper in ASSERT_FUNCS:
        passed, current_version, target, infodict, statustext, suggested_fix = checkinfo_wrapper()
        funcname = get_funcname(checkinfo_wrapper)
        if SHOW_STATUS:
            statustext_list.append(statustext)
        if passed:
            statustext_list.append(funcname + ' ' + str(infodict['__version__']) + ' passed')
            #statustext_list.append('')
        else:
            failed_list.append(funcname + ' FAILED!!!')
            fix_list.append(suggested_fix)
            statustext_list.append(funcname + ' FAILED!!!')
        if SHOW_STATUS:
            statustext_list.append('')

    output_text = '\n'.join(statustext_list)

    failed_text = '\n'.join(failed_list)
    print(output_text)
    print(failed_text)
    check_exist_text = check_modules_exists()
    print(check_exist_text)
    fix_text = ''
    if len(fix_list) > 0:
        fix_text += ('suggested fixes:\n')
        fix_text += ('\n'.join(fix_list) + '\n')
        print(fix_text)

    detailed_msg = '\n'.join([
        machine_info_text,
        output_text,
        failed_text,
        check_exist_text,
        fix_text,
    ])

    return detailed_msg
Example No. 51
 def prof_wrapper(*args, **kwargs):
     import utool as ut
     with ut.Timer(meta_util_six.get_funcname(func)):
         return func(*args, **kwargs)
Example No. 52
 def wrp_deleter(*args, **kwargs):
     if VERB_CONTROL:
         print('[DELETE]: ' + get_funcname(func))
         builtins.print('\n' + ut.func_str(func, args, kwargs) + '\n')
     return func_(*args, **kwargs)
Example No. 53
def process(func,
            args_list,
            args_dict={},
            force_serial=None,
            nTasks=None,
            quiet=QUIET):
    """
    Use ut.generate rather than ut.process

    Args:
        func (func):
        args_list (list or iter):
        args_dict (dict):
        force_serial (bool):

    Returns:
        result of parallel map(func, args_list)

    CommandLine:
        python -m utool.util_parallel --test-process

    Example:
        >>> # SLOW_DOCTEST
        >>> import utool as ut
        >>> num = 8700  # parallel is slower for smaller numbers
        >>> flag_generator0 = ut.process(ut.is_prime, list(zip(range(0, num))), force_serial=True)
        >>> flag_list0 = list(flag_generator0)
        >>> flag_generator1 = ut.process(ut.is_prime, list(zip(range(0, num))), force_serial=False)
        >>> flag_list1 = list(flag_generator1)
        >>> assert flag_list0 == flag_list1
    """
    if force_serial is None:
        force_serial = __FORCE_SERIAL__

    if USE_GLOBAL_POOL:
        ensure_pool(quiet=quiet)
    if nTasks is None:
        nTasks = len(args_list)
    if __POOL__ == 1 or force_serial:
        if not QUIET:
            print('[util_parallel] executing %d %s tasks in serial' %
                  (nTasks, get_funcname(func)))
        result_list = _process_serial(func,
                                      args_list,
                                      args_dict,
                                      nTasks=nTasks,
                                      quiet=quiet)
    else:
        if __POOL__ is None:
            pool = new_pool(num_procs=get_default_numprocs(),
                            init_worker=init_worker,
                            maxtasksperchild=None)
        else:
            pool = __POOL__
        if not QUIET:
            print('[util_parallel] executing %d %s tasks using %d processes' %
                  (nTasks, get_funcname(func), pool._processes))
        result_list = _process_parallel(func,
                                        args_list,
                                        args_dict,
                                        nTasks=nTasks,
                                        quiet=quiet,
                                        pool=pool)
    return result_list
Example No. 54
    uuid_ = get_image_uuid(img_bytes_)
    return uuid_


if __name__ == '__main__':
    multiprocessing.freeze_support()  # win32
    test_funcs = [
        make_uuid_PIL_bytes,
        make_uuid_NUMPY_bytes,
        make_uuid_NUMPY_STRIDE_16_bytes,
        make_uuid_NUMPY_STRIDE_64_bytes,
        make_uuid_CONTIG_NUMPY_bytes,
        make_uuid_CONTIG_NUMPY_STRIDE_16_bytes,
        make_uuid_CONTIG_NUMPY_STRIDE_64_bytes,
    ]
    func_strs = ', '.join([get_funcname(func) for func in test_funcs])
    # cool trick
    setup = 'from __main__ import (gpath, %s) ' % (func_strs, )

    number = 10

    for func in test_funcs:
        funcname = get_funcname(func)
        print('Running: %s' % funcname)
        if __LINE_PROFILE__:
            start = time.time()
            for _ in range(number):
                func(gpath)
            total_time = time.time() - start
        else:
            import timeit