Example #1
 def _spawner(func, *args, **kwargs):
     if wait != 0:
         print('Waiting for background process (%s) to spin up' % (ut.get_funcname(func,)))
     proc = ut.spawn_background_process(func, *args, **kwargs)
     time.sleep(wait)
     assert proc.is_alive(), 'proc (%s) died too soon' % (ut.get_funcname(func,))
     return proc
Example #2
 def _spawner(func, *args, **kwargs):
     if wait != 0:
         print('Waiting for background process (%s) to spin up' %
               (ut.get_funcname(func, )))
     proc = ut.spawn_background_process(func, *args, **kwargs)
     # time.sleep(wait)
     assert proc.is_alive(), 'proc (%s) died too soon' % (
         ut.get_funcname(func, ))
     return proc
Example #3
 def format_cell(cell):
     if ut.is_funclike(cell):
         header = '# ' + ut.to_title_caps(ut.get_funcname(cell))
         code = (header, ut.get_func_sourcecode(cell, stripdef=True, stripret=True))
     else:
         code = (None, cell)
     return generate_notebook.format_cells(code)
Example #4
        def _debug_button(func, r_next, refresh=True):
            def _simple_onevent(event):
                func()
                if refresh:
                    self.show_page()

            _add_button(ut.get_funcname(func), _simple_onevent, r_next())
Example #5
        def _spawner(func, *args, **kwargs):

            if thread:
                # mp.set_start_method('spawn')
                _spawner_func_ = ut.spawn_background_daemon_thread
            else:
                _spawner_func_ = ut.spawn_background_process

            if wait != 0:
                print('Waiting for background process (%s) to spin up' %
                      (ut.get_funcname(func, )))
            proc = _spawner_func_(func, *args, **kwargs)
            # time.sleep(wait)
            assert proc.is_alive(), 'proc (%s) died too soon' % (
                ut.get_funcname(func, ))
            return proc
Example #6
def gridsearch_image_function(param_info,
                              test_func,
                              args=tuple(),
                              show_func=None):
    """
    gridsearch for a function that produces a single image
    """
    import plottool as pt
    cfgdict_list, cfglbl_list = param_info.get_gridsearch_input(
        defaultslice=slice(0, 10))
    fnum = pt.ensure_fnum(None)
    if show_func is None:
        show_func = pt.imshow
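    # the test function's name doubles as the progress label and the figure title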
    lbl = ut.get_funcname(test_func)
    cfgresult_list = [
        test_func(*args, **cfgdict)
        for cfgdict in ut.ProgressIter(cfgdict_list, lbl=lbl)
    ]
    onclick_func = None
    ut.interact_gridsearch_result_images(show_func,
                                         cfgdict_list,
                                         cfglbl_list,
                                         cfgresult_list,
                                         fnum=fnum,
                                         figtitle=lbl,
                                         unpack=False,
                                         max_plots=25,
                                         onclick_func=onclick_func)
    pt.iup()
Example #7
 def sort_url(self, col_key, reverse=False):
     if reverse:
         direction = 'desc'
     else:
         direction = 'asc'
     return flask.url_for(ut.get_funcname(index),
                          sort=col_key,
                          direction=direction)
Example #8
 def format_cell(cell):
     if ut.is_funclike(cell):
         header = '# ' + ut.to_title_caps(ut.get_funcname(cell))
         code = (header,
                 ut.get_func_sourcecode(cell, stripdef=True, stripret=True))
     else:
         code = (None, cell)
     return generate_notebook.format_cells(code)
Example #9
def _register_nn_simple_weight_func(func):
    """
    Used for things that don't require a normalizer like const
    """
    filtkey = ut.get_funcname(func).replace('_match_weighter', '').lower()
    if ut.VERYVERBOSE:
        print('[nn_weights] registering simple func: %r' % (filtkey, ))
    NN_WEIGHT_FUNC_DICT[filtkey] = func
    return func
Example #10
def _register_nn_simple_weight_func(func):
    """
    Used for things that don't require a normalizer like const, cos, and borda
    """
    filtkey = ut.get_funcname(func).replace('_match_weighter', '').lower()
    if ut.VERYVERBOSE:
        print('[nn_weights] registering simple func: %r' % (filtkey,))
    NN_WEIGHT_FUNC_DICT[filtkey] = func
    return func
Example #11
def make_complete(r):
    import utool as ut
    import rob_interface
    modname = 'rob'
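    # collect the names of the functions owned by rob_interface to feed bash completion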
    testnames = [ut.get_funcname(func) for func in
                 ut.get_module_owned_functions(rob_interface)]
    line = 'complete -W "%s" "%s"' % (' '.join(testnames), modname)
    print('add the following line to your bashrc')
    print(line)
Example #12
 def register_command(self, name):
     import utool as ut
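     # works both as a direct decorator and as a decorator factory that takes a name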
     if ut.is_funclike(name):
         func = name
         name = ut.get_funcname(func)
         self._register_command(name, func)
         return func
     else:
         def _wrap(func):
             self._register_command(name, func)
         return _wrap
Example #13
def partial_imap_1to1(func, si_func):
    import functools
    @functools.wraps(si_func)
    def wrapper(input_):
        if not ut.isiterable(input_):
            return func(si_func(input_))
        else:
            return list(map(func, si_func(input_)))
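    # name the composed wrapper after both of the functions it chains together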
    ut.set_funcname(wrapper, ut.get_callable_name(func) + '_mapper_' +
                    ut.get_funcname(si_func))
    return wrapper
Example #14
def make_complete(r):
    import utool as ut
    import rob_interface
    modname = 'rob'
    testnames = [
        ut.get_funcname(func)
        for func in ut.get_module_owned_functions(rob_interface)
    ]
    line = 'complete -W "%s" "%s"' % (' '.join(testnames), modname)
    print('add the following line to your bashrc')
    print(line)
Example #15
    def register_command(self, name):
        import utool as ut
        if ut.is_funclike(name):
            func = name
            name = ut.get_funcname(func)
            self._register_command(name, func)
            return func
        else:

            def _wrap(func):
                self._register_command(name, func)

            return _wrap
Example #16
def generate_all():
    r"""
    CommandLine:
        python -m ibeis.scripts.gen_cand_expts --exec-generate_all --vim
        python -m ibeis.scripts.gen_cand_expts --exec-generate_all
        python -m ibeis.scripts.gen_cand_expts --exec-generate_all --full
        ./experiments_overnight.sh

    Example:
        >>> from ibeis.scripts.gen_cand_expts import *  # NOQA
        >>> generate_all()
    """
    #script_names = ['sh ' + func()[0] for func in TEST_GEN_FUNCS]
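    # for each registered generator: a section header, the command that reproduces it, and its script lines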
    script_lines = ut.flatten([
        ['\n\n### ' + ut.get_funcname(func),
         '# python -m ibeis.scripts.gen_cand_expts --exec-' +
         ut.get_funcname(func)] + make_standard_test_scripts(func())[2]
        for func in TEST_GEN_FUNCS])
    fname, script, line_list = write_script_lines(script_lines, 'experiments_overnight.sh')
    if ut.get_argflag('--vim'):
        ut.editfile(fname)
    return fname, script, line_list
Example #17
def _register_nn_normalized_weight_func(func):
    r"""
    Decorator for weighting functions

    Registers a nearest neighbor normalized weighting
    Used for LNBNN
    """
    global NN_WEIGHT_FUNC_DICT
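    # derive the filter key from the function name, e.g. a function named 'lnbnn_fn' registers as 'lnbnn'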
    filtkey = ut.get_funcname(func).replace('_fn', '').lower()
    if ut.VERYVERBOSE:
        print('[nn_weights] registering norm func: %r' % (filtkey, ))
    filtfunc = functools.partial(nn_normalized_weight, func)
    NN_WEIGHT_FUNC_DICT[filtkey] = filtfunc
    return func
Example #18
def _register_nn_normalized_weight_func(func):
    r"""
    Decorator for weighting functions

    Registers a nearest neighbor normalized weighting
    Used for LNBNN
    """
    global NN_WEIGHT_FUNC_DICT
    filtkey = ut.get_funcname(func).replace('_fn', '').lower()
    if ut.VERYVERBOSE:
        print('[nn_weights] registering norm func: %r' % (filtkey,))
    filtfunc = functools.partial(nn_normalized_weight, func)
    NN_WEIGHT_FUNC_DICT[filtkey] = filtfunc
    return func
Example #19
def ensure_task_table():
    if not hasattr(app, 'DBTaskTable'):

        class DBTaskTable(flask_table.Table):
            allow_sort = True

            def sort_url(self, col_key, reverse=False):
                if reverse:
                    direction = 'desc'
                else:
                    direction = 'asc'
                return flask.url_for(ut.get_funcname(index),
                                     sort=col_key,
                                     direction=direction)

        col_nice_lookup = {}
        columns = [
            'index',
            'dbname',
            ('task', task_link),
            # ('link', task_link)
        ]
        for tup in columns:
            if isinstance(tup, tuple):
                colname, link = tup
                colnice = col_nice_lookup.get(colname, colname)
                url_kwargs = {a: a for a in ut.get_func_argspec(link).args}
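                # use the linked view function's name as the flask endpoint for the column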
                endpoint = ut.get_funcname(link)
                link_kw = dict(
                    name=colnice,
                    attr=colname,
                    endpoint=endpoint,
                    url_kwargs=url_kwargs,
                    allow_sort=True,
                    show=True,
                )
                new_col = flask_table.LinkCol(**link_kw)
            elif isinstance(tup, six.string_types):
                colname = tup
                colnice = col_nice_lookup.get(colname, colname)
                new_col = flask_table.Col(name=colnice,
                                          attr=colname,
                                          allow_sort=True,
                                          show=True)
            else:
                assert False, 'unknown tup'
            DBTaskTable.add_column(colname, new_col)
        app.DBTaskTable = DBTaskTable
    return app.DBTaskTable
Example #20
def test_transforms():
    r"""
    CommandLine:
        python -m ibeis_cnn.augment --test-test_transforms --show

    Example:
        >>> # ENABLE_DOCTEST
        >>> from ibeis_cnn.augment import *  # NOQA
        >>> test_transforms()
    """
    from ibeis_cnn import ingest_data, utils, draw_results
    data, labels = ingest_data.testdata_patchmatch()
    cv2_data = utils.convert_theano_images_to_cv2_images(data)
    patches_ = cv2_data[::2]

    transform_list = ut.flatten(all_transforms)

    num_random = 5
    import vtool as vt
    for x in range(num_random):
        affine_kw = random_affine_kwargs()
        func = functools.partial(vt.affine_warp_around_center, **affine_kw)
        transform_list.append(func)

    orig_list   = []
    warped_list = []

    name_list = []

    for patch, func in zip(patches_, transform_list):
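        # functools.partial objects have no __name__, so they are labeled with a different helper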
        if isinstance(func, functools.partial):
            name = ut.get_partial_func_name(func)
        else:
            name = ut.get_funcname(func)
        print(name)
        warped = func(patch)
        orig_list.append(patch)
        name_list.append(name)
        warped_list.append(warped)

    index_list = list(range(len(orig_list)))
    label_list = None
    tup = draw_results.get_patch_sample_img(orig_list, warped_list, label_list, {'text': name_list}, index_list, (1, len(index_list)))
    stacked_img, stacked_offsets, stacked_sfs = tup
    ut.quit_if_noshow()
    import plottool as pt
    pt.imshow(stacked_img)
    ut.show_if_requested()
Example #21
def gridsearch_image_function(param_info, test_func, args=tuple(), show_func=None):
    """
    gridsearch for a function that produces a single image
    """
    import plottool as pt
    cfgdict_list, cfglbl_list = param_info.get_gridsearch_input(defaultslice=slice(0, 10))
    fnum = pt.ensure_fnum(None)
    if show_func is None:
        show_func = pt.imshow
    lbl = ut.get_funcname(test_func)
    cfgresult_list = [
        test_func(*args, **cfgdict)
        for cfgdict in ut.ProgressIter(cfgdict_list, lbl=lbl)
    ]
    onclick_func = None
    ut.interact_gridsearch_result_images(
        show_func, cfgdict_list, cfglbl_list,
        cfgresult_list, fnum=fnum,
        figtitle=lbl, unpack=False,
        max_plots=25, onclick_func=onclick_func)
    pt.iup()
Example #22
def _register_misc_weight_func(func):
    filtkey = ut.get_funcname(func).replace('_match_weighter', '').lower()
    if ut.VERYVERBOSE:
        print('[nn_weights] registering simple func: %r' % (filtkey,))
    MISC_WEIGHT_FUNC_DICT[filtkey] = func
    return func
Example #23
def _inject_getter_attrs(metaself,
                         objname,
                         attrs,
                         configurable_attrs,
                         depc_name=None,
                         depcache_attrs=None,
                         settable_attrs=None,
                         aliased_attrs=None):
    """
    Used by the metaclass to inject methods and properties into the class
    inheriting from ObjectList1D
    """

    if settable_attrs is None:
        settable_attrs = []
    settable_attrs = set(settable_attrs)

    # Inform the class of which variables will be injected
    metaself._settable_attrs = settable_attrs
    metaself._attrs = attrs
    metaself._configurable_attrs = configurable_attrs
    if depcache_attrs is None:
        metaself._depcache_attrs = []
    else:
        metaself._depcache_attrs = [
            '%s_%s' % (tbl, col) for tbl, col in depcache_attrs
        ]
    if aliased_attrs is not None:
        metaself._attrs_aliases = aliased_attrs
    else:
        metaself._attrs_aliases = {}

    # if not getattr(metaself, '__needs_inject__', True):
    #     return

    attr_to_aliases = ut.invert_dict(metaself._attrs_aliases,
                                     unique_vals=False)

    # What is difference between configurable and depcache getters?
    # Could depcache getters just be made configurable?
    # I guess it's just an efficiency thing. Actually it's config2_-vs-config
    # FIXME: rectify differences between normal / configurable / depcache
    # getter

    def _make_caching_setter(attrname, _rowid_setter):
        def _setter(self, values, *args, **kwargs):
            if self._ibs is None:
                self._internal_attrs[attrname] = values
            else:
                if self._caching and attrname in self._internal_attrs:
                    self._internal_attrs[attrname] = values
                _rowid_setter(self, self._rowids, values)

        ut.set_funcname(_setter, '_set_' + attrname)
        return _setter

    def _make_caching_getter(attrname, _rowid_getter):
        def _getter(self):
            if self._ibs is None or (self._caching
                                     and attrname in self._internal_attrs):
                data = self._internal_attrs[attrname]
            else:
                data = _rowid_getter(self, self._rowids)
                if self._caching:
                    self._internal_attrs[attrname] = data
            return data

        ut.set_funcname(_getter, '_get_' + attrname)
        return _getter

    # make default version use implicit rowids and another
    # that takes explicit rowids.

    def _make_setters(objname, attrname):
        ibs_funcname = 'set_%s_%s' % (objname, attrname)

        def _rowid_setter(self, rowids, values, *args, **kwargs):
            ibs_callable = getattr(self._ibs, ibs_funcname)
            ibs_callable(rowids, values, *args, **kwargs)

        ut.set_funcname(_rowid_setter, '_rowid_set_' + attrname)
        _setter = _make_caching_setter(attrname, _rowid_setter)
        return _rowid_setter, _setter

    # ---

    def _make_getters(objname, attrname):
        ibs_funcname = 'get_%s_%s' % (objname, attrname)

        def _rowid_getter(self, rowids):
            ibs_callable = getattr(self._ibs, ibs_funcname)
            data = ibs_callable(rowids)
            if self._asarray:
                data = np.array(data)
            return data

        ut.set_funcname(_rowid_getter, '_rowid_get_' + attrname)
        _getter = _make_caching_getter(attrname, _rowid_getter)
        return _rowid_getter, _getter

    def _make_cfg_getters(objname, attrname):
        ibs_funcname = 'get_%s_%s' % (objname, attrname)

        def _rowid_getter(self, rowids):
            ibs_callable = getattr(self._ibs, ibs_funcname)
            data = ibs_callable(rowids, config2_=self._config)
            if self._asarray:
                data = np.array(data)
            return data

        ut.set_funcname(_rowid_getter, '_rowid_get_' + attrname)
        _getter = _make_caching_getter(attrname, _rowid_getter)
        return _rowid_getter, _getter

    def _make_depc_getters(depc_name, attrname, tbl, col):
        def _rowid_getter(self, rowids):
            depc = getattr(self._ibs, depc_name)
            data = depc.get(tbl, rowids, col, config=self._config)
            if self._asarray:
                data = np.array(data)
            return data

        ut.set_funcname(_rowid_getter, '_rowid_get_' + attrname)
        _getter = _make_caching_getter(attrname, _rowid_getter)
        return _rowid_getter, _getter

    # Collect setter / getter functions and properties
    rowid_getters = []
    getters = []
    setters = []
    properties = []
    for attrname in attrs:
        _rowid_getter, _getter = _make_getters(objname, attrname)
        if attrname in settable_attrs:
            _rowid_setter, _setter = _make_setters(objname, attrname)
            setters.append(_setter)
        else:
            _setter = None
        prop = property(fget=_getter, fset=_setter)
        rowid_getters.append((attrname, _rowid_getter))
        getters.append(_getter)
        properties.append((attrname, prop))

    for attrname in configurable_attrs:
        _rowid_getter, _getter = _make_cfg_getters(objname, attrname)
        prop = property(fget=_getter)
        rowid_getters.append((attrname, _rowid_getter))
        getters.append(_getter)
        properties.append((attrname, prop))

    if depcache_attrs is not None:
        for tbl, col in depcache_attrs:
            attrname = '%s_%s' % (tbl, col)
            _rowid_getter, _getter = _make_depc_getters(
                depc_name, attrname, tbl, col)
            prop = property(fget=_getter, fset=None)
            rowid_getters.append((attrname, _rowid_getter))
            getters.append(_getter)
            properties.append((attrname, prop))

    aliases = []

    # Inject all gathered information
    for attrname, func in rowid_getters:
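        # the rowid getters were renamed with ut.set_funcname above, so attach each under that name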
        funcname = ut.get_funcname(func)
        setattr(metaself, funcname, func)
        # ensure aliases have rowid getters
        for alias in attr_to_aliases.get(attrname, []):
            alias_funcname = '_rowid_get_' + alias
            setattr(metaself, alias_funcname, func)

    for func in getters:
        funcname = ut.get_funcname(func)
        setattr(metaself, funcname, func)

    for func in setters:
        funcname = ut.get_funcname(func)
        setattr(metaself, funcname, func)

    for attrname, prop in properties:
        setattr(metaself, attrname, prop)
        for alias in attr_to_aliases.pop(attrname, []):
            aliases.append((alias, attrname))
            setattr(metaself, alias, prop)

    if ut.get_argflag('--autogen-core'):
        # TODO: turn on autogeneration given a flag
        def expand_closure_source(funcname, func):
            source = ut.get_func_sourcecode(func)
            closure_vars = [
                (k, v.cell_contents)
                for k, v in zip(func.func_code.co_freevars, func.func_closure)
            ]
            source = ut.unindent(source)
            import re
            for k, v in closure_vars:
                source = re.sub('\\b' + k + '\\b', ut.repr2(v), source)
            source = re.sub('def .*\(self', 'def ' + funcname + '(self',
                            source)
            source = ut.indent(source.strip(), '    ') + '\n'
            return source

        explicit_lines = []
        # build explicit version for jedi?
        for func in getters:
            funcname = ut.get_funcname(func)
            source = expand_closure_source(funcname, func)
            explicit_lines.append(source)
        # build explicit version for jedi?
        for func in setters:
            funcname = ut.get_funcname(func)
            source = expand_closure_source(funcname, func)
            explicit_lines.append(source)

        for attrname, prop in properties:
            getter_name = None if prop.fget is None else ut.get_funcname(
                prop.fget)
            setter_name = None if prop.fset is None else ut.get_funcname(
                prop.fset)
            source = '    %s = property(%s, %s)' % (attrname, getter_name,
                                                    setter_name)
            explicit_lines.append(source)

        for alias, attrname in aliases:
            source = '    %s = %s' % (alias, attrname)
            explicit_lines.append(source)

        explicit_source = '\n'.join([
            'from ibeis import _ibeis_object',
            '',
            '',
            'class _%s_base_class(_ibeis_object.ObjectList1D):',
            '    __needs_inject__ = False',
            '',
        ]) % (objname, )
        explicit_source += '\n'.join(explicit_lines)
        explicit_fname = '_autogen_%s_base.py' % (objname, )
        from os.path import dirname, join
        ut.writeto(join(dirname(__file__), explicit_fname),
                   explicit_source + '\n')

    if attr_to_aliases:
        raise AssertionError('Unmapped aliases %r' % (attr_to_aliases, ))
Example #24
def _register_misc_weight_func(func):
    filtkey = ut.get_funcname(func).replace('_match_weighter', '').lower()
    if ut.VERYVERBOSE:
        print('[nn_weights] registering simple func: %r' % (filtkey, ))
    MISC_WEIGHT_FUNC_DICT[filtkey] = func
    return func
Example #25
def get_layer_info(layer):
    r"""
    Args:
        layer (?):

    Returns:
        ?: layer_info

    CommandLine:
        python -m ibeis_cnn.net_strs get_layer_info --show

    Example:
        >>> # DISABLE_DOCTEST
        >>> from ibeis_cnn.net_strs import *  # NOQA
        >>> from ibeis_cnn import models
        >>> model = models.mnist.MNISTModel(batch_size=8, data_shape=(24, 24, 1), output_dims=10)
        >>> model.init_arch()
        >>> nn_layers = model.get_all_layers()
        >>> for layer in nn_layers:
        >>>     layer_info = get_layer_info(layer)
        >>>     print(ut.repr3(layer_info, nl=1))
    """
    import operator
    import ibeis_cnn.__LASAGNE__ as lasagne
    # Information that contributes to RAM usage
    import numpy as np
    # Get basic layer infos
    output_shape = lasagne.layers.get_output_shape(layer)
    input_shape = getattr(layer, 'input_shape', [])
    # Get number of outputs ignoring the batch size
    num_outputs = functools.reduce(operator.mul, output_shape[1:])
    if len(input_shape):
        num_inputs = functools.reduce(operator.mul, input_shape[1:])
    else:
        num_inputs = 0
    # TODO: if we can ever support non float32 calculations this must change
    #layer_type = 'float32'
    layer_dtype = np.dtype('float32')

    # Get individual param infos
    param_infos = []
    for param, tags in layer.params.items():
        value = param.get_value()
        pbasename = param_basename(layer, param)
        param_info = ut.odict([
            ('name', param.name),
            ('basename', pbasename),
            ('tags', tags),
            ('shape', value.shape),
            ('size', value.size),
            ('itemsize', value.dtype.itemsize),
            ('dtype', str(value.dtype)),
            ('bytes', value.size * value.dtype.itemsize),
        ])

        def initializer_info(initclass):
            initclassname = initclass.__class__.__name__
            if initclassname == 'Constant':
                spec = initclass.val
            else:
                spec = ut.odict()
                spec['type'] = initclassname
                for key, val in initclass.__dict__.items():
                    if isinstance(val, lasagne.init.Initializer):
                        spec[key] = initializer_info(val)
                    elif isinstance(val, type) and issubclass(val, lasagne.init.Initializer):
                        spec[key] = val.__name__
                        #initializer_info(val())
                    else:
                        spec[key] = val
            return spec

        if hasattr(layer, '_initializers'):
            #print('layer = %r' % (layer,))
            initclass = layer._initializers[param]
            spec = initializer_info(initclass)
            param_info['init'] = spec

        param_infos.append(param_info)
    # Combine param infos
    param_str = surround(', '.join(
        [paramstr(layer, p, tags) for p, tags in layer.params.items()]), '[]')
    param_type_str = surround(', '.join(
        [repr(p.type) for p, tags in layer.params.items()]), '[]')
    num_params = sum([info['size'] for info in param_infos])

    classalias_map = {
        'ElemwiseSumLayer': 'ElemwiseSum',
        'Conv2DCCLayer'    : 'Conv2D',
        'Conv2DDNNLayer'   : 'Conv2D',
        'Conv2DLayer'   : 'Conv2D',
        'MaxPool2DLayer': 'MaxPool2D',
        'MaxPool2DCCLayer' : 'MaxPool2D',
        'MaxPool2DDNNLayer' : 'MaxPool2D',
        'LeakyRectify'     : 'LReLU',
        'InputLayer'       : 'Input',
        'GaussianNoiseLayer': 'Noise',
        'DropoutLayer'     : 'Dropout',
        'DenseLayer'       : 'Dense',
        'NonlinearityLayer' : 'Nonlinearity',
        'FlattenLayer'     : 'Flatten',
        'L2NormalizeLayer' : 'L2Norm',
        'BatchNormLayer'   : 'BatchNorm',
        'BatchNormLayer2'   : 'BatchNorm',
    }
    layer_attrs_ignore_dict = {
        'MaxPool2D'  : ['mode', 'ignore_border'],
        'Dropout'  : ['rescale'],
        'Conv2D'   : ['convolution'],
        'BatchNorm': ['epsilon', 'mean', 'inv_std', 'axes', 'beta', 'gamma'],
        'BatchNorm2': ['epsilon', 'mean', 'inv_std', 'axes', 'beta', 'gamma'],
        #'ElemwiseSum': ['merge_function', 'cropping'],
        #'ElemwiseSum': [],
        'FeaturePoolLayer': ['axis'],
    }
    layer_attrs_dict = {
        #'ElemwiseSum': ['coeffs'],
        #'ElemwiseSum': ['coeffs', 'merge_function', 'cropping'],
        'Noise'     : ['sigma'],
        'Input'     : ['shape'],
        'Dropout'   : ['p', 'shared_axes'],
        'Conv2D'    : ['num_filters', 'filter_size', 'stride', 'output_shape', 'num_groups'],
        'MaxPool2D' : ['stride', 'pool_size', 'output_shape'],  # 'mode'],
        'Dense'     : ['num_units', 'num_leading_axes'],
        'SoftMax'   : ['num_units', 'num_leading_axes'],
        'L2Norm'    : ['axis'],
        'BatchNorm' : ['alpha'],
        'BatchNorm2' : ['alpha'],
        'FeaturePoolLayer': ['pool_size', 'pool_function']
    }
    #layer_attrs_dict = {}
    all_ignore_attrs = ['nonlinearity', 'b', 'W', 'get_output_kwargs', 'name',
                        'input_shapes', 'input_layers', 'input_shape',
                        'input_layer', 'input_var', 'untie_biases',
                        '_initializers',
                        'flip_filters', 'pad', 'params', 'n', '_is_main_layer']

    classname = layer.__class__.__name__
    classalias = classalias_map.get(classname, classname)
    #if classalias == 'FeaturePoolLayer' and ut.get_funcname(layer.pool_function) == 'max':
    #    classalias = 'MaxOut'
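    # a Dense layer with a softmax nonlinearity is displayed under the 'SoftMax' alias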
    if classalias == 'Dense' and ut.get_funcname(layer.nonlinearity) == 'softmax':
        classalias = 'SoftMax'

    layer_attrs = ut.odict([
        (key, getattr(layer, key))
        for key in layer_attrs_dict.get(classalias, [])
    ])
    ignore_attrs = (all_ignore_attrs +
                    layer_attrs_ignore_dict.get(classalias, []))

    if classalias not in layer_attrs_dict or (classalias == classname and len(layer_attrs) == 0):
        layer_attrs = layer.__dict__.copy()
        ut.delete_dict_keys(layer_attrs, ignore_attrs)

    for key in list(layer_attrs.keys()):
        val = layer_attrs[key]
        if ut.is_funclike(val):
            layer_attrs[key] = ut.get_funcname(val)

    attr_key_list = list(layer_attrs.keys())
    missing_keys = (set(layer.__dict__.keys()) - set(ignore_attrs) - set(attr_key_list))
    missing_keys = [k for k in missing_keys if not k.startswith('_')]

    #if layer_type == 'Conv2DCCLayer':
    #    ut.embed()
    DEBUG = True
    if DEBUG and len(missing_keys) > 0:
        print('---')
        print(' * ' + classname)
        print(' * missing keys: %r' % (missing_keys,))
        print(' * has keys: %r' % (attr_key_list,))
        if True:
            #import utool
            #with utool.embed_on_exception_context:
            #raise AssertionError('MISSING KEYS')
            pass

    # handle None batch sizes
    if output_shape[0] is None:
        size = np.prod(output_shape[1:])
    else:
        size = np.prod(output_shape)

    layer_info = ut.odict([
        ('name', layer.name),
        ('classname', classname),
        ('classalias', classalias),
        ('output_shape', output_shape),
        ('input_shape', input_shape),
        ('num_outputs', num_outputs),
        ('num_inputs', num_inputs),
        ('size', size),
        ('itemsize', layer_dtype.itemsize),
        ('dtype', str(layer_dtype)),
        ('num_params', num_params),
        ('param_infos', param_infos),
        ('param_str', param_str),
        ('param_type_str', param_type_str),
        ('layer_attrs', layer_attrs),
        ('nonlinearity', None),
    ])

    if hasattr(layer, 'nonlinearity'):
        try:
            nonlinearity = layer.nonlinearity.__name__
        except AttributeError:
            nonlinearity = layer.nonlinearity.__class__.__name__
        layer_info['nonlinearity'] = ut.odict([])
        layer_info['nonlinearity']['type'] = nonlinearity
        layer_info['nonlinearity'].update(layer.nonlinearity.__dict__)
        #attr_str_list.append('nonlinearity={0}'.format(nonlinearity))

    param_bytes = sum([info['bytes'] for info in param_infos])
    layer_bytes = layer_info['size'] * layer_info['itemsize']
    #if classname in ['BatchNormLayer', 'NonlinearityLayer']:
    #    layer_bytes = 0
    layer_info['bytes'] = layer_bytes
    layer_info['param_bytes'] = param_bytes
    layer_info['total_bytes'] = layer_bytes + param_bytes
    layer_info['total_memory'] = ut.byte_str2(layer_info['total_bytes'])
    return layer_info
Example #26
def gridsearch_chipextract():
    r"""
    CommandLine:
        python -m vtool.chip --test-gridsearch_chipextract --show

    Example:
        >>> # GRIDSEARCH
        >>> from vtool.chip import *  # NOQA
        >>> gridsearch_chipextract()
        >>> ut.show_if_requested()
    """
    import cv2
    test_func = extract_chip_from_img
    if False:
        gpath = ut.grab_test_imgpath('carl.jpg')
        bbox = (100, 3, 100, 100)
        theta = 0.0
        new_size = (58, 34)
    else:
        gpath = '/media/raid/work/GZ_Master1/_ibsdb/images/1524525d-2131-8770-d27c-3a5f9922e9e9.jpg'
        bbox = (450, 373, 2062, 1124)
        theta = 0.0
        old_size = bbox[2:4]
        #target_area = 700 ** 2
        target_area = 1200 ** 2
        new_size = get_scaled_sizes_with_area(target_area, [old_size])[0]
        print('old_size = %r' % (old_size,))
        print('new_size = %r' % (new_size,))
        #new_size = (677, 369)
    imgBGR = gtool.imread(gpath)
    args = (imgBGR, bbox, theta, new_size)
    param_info = ut.ParamInfoList('extract_params', [
        ut.ParamInfo('interpolation', cv2.INTER_LANCZOS4,
                     varyvals=[
                         cv2.INTER_LANCZOS4,
                         cv2.INTER_CUBIC,
                         cv2.INTER_LINEAR,
                         cv2.INTER_NEAREST,
                         #cv2.INTER_AREA
                     ],)
    ])
    show_func = None
    # Generalize
    import plottool as pt
    pt.imshow(imgBGR)  # HACK
    cfgdict_list, cfglbl_list = param_info.get_gridsearch_input(defaultslice=slice(0, 10))
    fnum = pt.ensure_fnum(None)
    if show_func is None:
        show_func = pt.imshow
    lbl = ut.get_funcname(test_func)
    cfgresult_list = [
        test_func(*args, **cfgdict)
        for cfgdict in ut.ProgressIter(cfgdict_list, lbl=lbl)
    ]
    onclick_func = None
    ut.interact_gridsearch_result_images(
        show_func, cfgdict_list, cfglbl_list,
        cfgresult_list, fnum=fnum,
        figtitle=lbl, unpack=False,
        max_plots=25, onclick_func=onclick_func)
    pt.iup()
Example #27
def spawn_background_process(func, *args, **kwargs):
    """
    Run a function in the background
    (like rebuilding some costly data structure)

    References:
        http://stackoverflow.com/questions/2046603/is-it-possible-to-run-function-in-a-subprocess-without-threading-or-writing-a-se
        http://stackoverflow.com/questions/1196074/starting-a-background-process-in-python
        http://stackoverflow.com/questions/15063963/python-is-thread-still-running

    Args:
        func (function):

    CommandLine:
        python -m utool.util_parallel --test-spawn_background_process

    Example:
        >>> # SLOW_DOCTEST
        >>> from utool.util_parallel import *  # NOQA
        >>> import utool as ut
        >>> import time
        >>> from os.path import join
        >>> # build test data
        >>> fname = 'test_bgfunc_output.txt'
        >>> dpath = ut.get_app_resource_dir('utool')
        >>> ut.ensuredir(dpath)
        >>> fpath = join(dpath, fname)
        >>> # ensure file is not around
        >>> sleep_time = 1
        >>> ut.delete(fpath)
        >>> assert not ut.checkpath(fpath, verbose=True)
        >>> def backgrond_func(fpath, sleep_time):
        ...     import utool as ut
        ...     import time
        ...     print('[BG] Background Process has started')
        ...     time.sleep(sleep_time)
        ...     print('[BG] Background Process is writing')
        ...     ut.write_to(fpath, 'background process')
        ...     print('[BG] Background Process has finished')
        ...     #raise AssertionError('test exception')
        >>> # execute function
        >>> func = backgrond_func
        >>> args = (fpath, sleep_time)
        >>> kwargs = {}
        >>> print('[FG] Spawning process')
        >>> threadid = ut.spawn_background_process(func, *args, **kwargs)
        >>> assert threadid.is_alive() is True, 'thread should be active'
        >>> print('[FG] Spawned process. threadid=%r' % (threadid,))
        >>> # background process should not have finished yet
        >>> assert not ut.checkpath(fpath, verbose=True)
        >>> print('[FG] Waiting to check')
        >>> time.sleep(sleep_time + .1)
        >>> print('[FG] Finished waiting')
        >>> # Now the file should be there
        >>> assert ut.checkpath(fpath, verbose=True)
        >>> assert threadid.is_alive() is False, 'process should have died'
    """
    import utool as ut
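    # name the background process after the target function to make it easy to identify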
    func_name = ut.get_funcname(func)
    name = 'mp.Progress-' + func_name
    #proc_obj = multiprocessing.Process(target=func, name=name, args=args, kwargs=kwargs)
    proc_obj = KillableProcess(target=func,
                               name=name,
                               args=args,
                               kwargs=kwargs)
    #proc_obj.daemon = True
    #proc_obj.isAlive = proc_obj.is_alive
    proc_obj.start()
    return proc_obj
Example #28
def register_test(func):
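    # register the test under its function name; returning func lets this act as a decorator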
    tests[ut.get_funcname(func)] = func
    return func
Example #29
def spawn_background_process(func, *args, **kwargs):
    """
    Run a function in the background
    (like rebuilding some costly data structure)

    References:
        http://stackoverflow.com/questions/2046603/is-it-possible-to-run-function-in-a-subprocess-without-threading-or-writing-a-se
        http://stackoverflow.com/questions/1196074/starting-a-background-process-in-python
        http://stackoverflow.com/questions/15063963/python-is-thread-still-running

    Args:
        func (function):

    CommandLine:
        python -m utool.util_parallel --test-spawn_background_process

    Example:
        >>> # SLOW_DOCTEST
        >>> from utool.util_parallel import *  # NOQA
        >>> import utool as ut
        >>> import time
        >>> from os.path import join
        >>> # build test data
        >>> fname = 'test_bgfunc_output.txt'
        >>> dpath = ut.get_app_resource_dir('utool')
        >>> ut.ensuredir(dpath)
        >>> fpath = join(dpath, fname)
        >>> # ensure file is not around
        >>> sleep_time = 1
        >>> ut.delete(fpath)
        >>> assert not ut.checkpath(fpath, verbose=True)
        >>> def backgrond_func(fpath, sleep_time):
        ...     import utool as ut
        ...     import time
        ...     print('[BG] Background Process has started')
        ...     time.sleep(sleep_time)
        ...     print('[BG] Background Process is writing')
        ...     ut.write_to(fpath, 'background process')
        ...     print('[BG] Background Process has finished')
        ...     #raise AssertionError('test exception')
        >>> # execute function
        >>> func = backgrond_func
        >>> args = (fpath, sleep_time)
        >>> kwargs = {}
        >>> print('[FG] Spawning process')
        >>> threadid = ut.spawn_background_process(func, *args, **kwargs)
        >>> assert threadid.is_alive() is True, 'thread should be active'
        >>> print('[FG] Spawned process. threadid=%r' % (threadid,))
        >>> # background process should not have finished yet
        >>> assert not ut.checkpath(fpath, verbose=True)
        >>> print('[FG] Waiting to check')
        >>> time.sleep(sleep_time + .1)
        >>> print('[FG] Finished waiting')
        >>> # Now the file should be there
        >>> assert ut.checkpath(fpath, verbose=True)
        >>> assert threadid.is_alive() is False, 'process should have died'
    """
    import utool as ut
    func_name = ut.get_funcname(func)
    name = 'mp.Progress-' + func_name
    #proc_obj = multiprocessing.Process(target=func, name=name, args=args, kwargs=kwargs)
    proc_obj = KillableProcess(target=func, name=name, args=args, kwargs=kwargs)
    #proc_obj.daemon = True
    #proc_obj.isAlive = proc_obj.is_alive
    proc_obj.start()
    return proc_obj
Example #30
def preserve_sig(wrapper, orig_func, force=False):
    """
    Decorates a wrapper function.

    It seems impossible to preserve signatures in python 2 without eval
    (Maybe another option is to write to a temporary module?)

    Args:
        wrapper: the function wrapping orig_func to change the signature of
        orig_func: the original function to take the signature from

    References:
        http://emptysqua.re/blog/copying-a-python-functions-signature/
        https://code.google.com/p/micheles/source/browse/decorator/src/decorator.py

    TODO:
        checkout funcsigs
        https://funcsigs.readthedocs.org/en/latest/

    Example:
        >>> # ENABLE_DOCTEST
        >>> import utool as ut
        >>> #ut.rrrr(False)
        >>> def myfunction(self, listinput_, arg1, *args, **kwargs):
        >>>     " just a test function "
        >>>     return [x + 1 for x in listinput_]
        >>> orig_func = myfunction
        >>> wrapper = ut.accepts_scalar_input2([0])(orig_func)
        >>> _wrp_preserve1 = ut.preserve_sig(wrapper, orig_func, True)
        >>> _wrp_preserve2 = ut.preserve_sig(wrapper, orig_func, False)
        >>> print(ut.get_func_sourcecode(_wrp_preserve1))
        >>> print(ut.get_func_sourcecode(_wrp_preserve2))

        >>> result = str(_wrp_preserve1)
        >>> print(result)
    """
    import utool as ut

    if wrapper is orig_func:
        # nothing to do
        return orig_func
    orig_docstr = ut.get_funcdoc(orig_func)
    orig_docstr = '' if orig_docstr is None else orig_docstr
    orig_argspec = ut.get_func_argspec(orig_func)
    wrap_name = meta_util_six.get_funccode(wrapper).co_name
    orig_name = ut.get_funcname(orig_func)

    # At the very least preserve info in a dictionary
    _utinfo = {}
    _utinfo['orig_func'] = orig_func
    _utinfo['wrap_name'] = wrap_name
    _utinfo['orig_name'] = orig_name
    _utinfo['orig_argspec'] = orig_argspec

    if hasattr(wrapper, '_utinfo'):
        parent_wrapper_utinfo = wrapper._utinfo
        _utinfo['parent_wrapper_utinfo'] = parent_wrapper_utinfo
    if hasattr(orig_func, '_utinfo'):
        parent_orig_utinfo = orig_func._utinfo
        _utinfo['parent_orig_utinfo'] = parent_orig_utinfo

    # environment variable is set if you are building documentation
    # preserve sig if building docs
    building_docs = os.environ.get('UTOOL_AUTOGEN_SPHINX_RUNNING', 'OFF') == 'ON'

    if force or SIG_PRESERVE or building_docs:
        # PRESERVES ALL SIGNATURES WITH EXECS
        src_fmt = r'''
        def _wrp_preserve{defsig}:
            """ {orig_docstr} """
            try:
                return wrapper{callsig}
            except Exception as ex:
                import utool as ut
                msg = ('Failure in signature preserving wrapper:\n')
                ut.printex(ex, msg)
                raise
        '''
        # Put wrapped function into a scope
        globals_ =  {'wrapper': wrapper}
        locals_ = {}
        # argspec is :ArgSpec(args=['bar', 'baz'], varargs=None, keywords=None, defaults=(True,))
        # get orig functions argspec
        # get functions signature
        # Get function call signature (no defaults)
        # Define an exec function
        argspec = inspect.getargspec(orig_func)
        (args, varargs, varkw, defaults) = argspec
        defsig = inspect.formatargspec(*argspec)
        callsig = inspect.formatargspec(*argspec[0:3])
        src_fmtdict = dict(defsig=defsig, callsig=callsig, orig_docstr=orig_docstr)
        src = textwrap.dedent(src_fmt).format(**src_fmtdict)
        # Define the new function on the fly
        # (I wish there was a non exec / eval way to do this)
        #print(src)
        six.exec_(src, globals_, locals_)
        # Use functools.update_wapper to complete preservation
        _wrp_preserve = functools.update_wrapper(locals_['_wrp_preserve'], orig_func)
        # Keep debug info
        _utinfo['src'] = src
        # Set an internal sig variable that we may use
        #_wrp_preserve.__sig__ = defsig
    else:
        # PRESERVES SOME SIGNATURES NO EXEC
        # signature preservation is turned off. just preserve the name.
        # Does not use any exec or eval statements.
        import utool as ut
        _wrp_preserve = functools.update_wrapper(wrapper, orig_func)
        # Just do something to preserve signature

    DEBUG_WRAPPED_DOCSTRING = False
    if DEBUG_WRAPPED_DOCSTRING:
        new_docstr_fmtstr = ut.codeblock(
            '''
            Wrapped function {wrap_name}({orig_name})

            orig_argspec = {orig_argspec}

            orig_docstr = {orig_docstr}
            '''
        )
    else:
        new_docstr_fmtstr = ut.codeblock(
            '''
            {orig_docstr}
            '''
        )
    new_docstr = new_docstr_fmtstr.format(wrap_name=wrap_name,
                                          orig_name=orig_name, orig_docstr=orig_docstr,
                                          orig_argspec=orig_argspec)
    ut.set_funcdoc(_wrp_preserve, new_docstr)
    _wrp_preserve._utinfo = _utinfo
    return _wrp_preserve
Example #31
def gridsearch_chipextract():
    r"""
    CommandLine:
        python -m vtool.chip --test-gridsearch_chipextract --show

    Example:
        >>> # GRIDSEARCH
        >>> from vtool.chip import *  # NOQA
        >>> gridsearch_chipextract()
        >>> ut.show_if_requested()
    """
    import cv2
    test_func = extract_chip_from_img
    if False:
        gpath = ut.grab_test_imgpath('carl.jpg')
        bbox = (100, 3, 100, 100)
        theta = 0.0
        new_size = (58, 34)
    else:
        gpath = '/media/raid/work/GZ_Master1/_ibsdb/images/1524525d-2131-8770-d27c-3a5f9922e9e9.jpg'
        bbox = (450, 373, 2062, 1124)
        theta = 0.0
        old_size = bbox[2:4]
        #target_area = 700 ** 2
        target_area = 1200**2
        new_size = get_scaled_sizes_with_area(target_area, [old_size])[0]
        print('old_size = %r' % (old_size, ))
        print('new_size = %r' % (new_size, ))
        #new_size = (677, 369)
    imgBGR = gtool.imread(gpath)
    args = (imgBGR, bbox, theta, new_size)
    param_info = ut.ParamInfoList(
        'extract_params',
        [
            ut.ParamInfo(
                'interpolation',
                cv2.INTER_LANCZOS4,
                varyvals=[
                    cv2.INTER_LANCZOS4,
                    cv2.INTER_CUBIC,
                    cv2.INTER_LINEAR,
                    cv2.INTER_NEAREST,
                    #cv2.INTER_AREA
                ],
            )
        ])
    show_func = None
    # Generalize
    import plottool as pt
    pt.imshow(imgBGR)  # HACK
    cfgdict_list, cfglbl_list = param_info.get_gridsearch_input(
        defaultslice=slice(0, 10))
    fnum = pt.ensure_fnum(None)
    if show_func is None:
        show_func = pt.imshow
    lbl = ut.get_funcname(test_func)
    cfgresult_list = [
        test_func(*args, **cfgdict)
        for cfgdict in ut.ProgressIter(cfgdict_list, lbl=lbl)
    ]
    onclick_func = None
    ut.interact_gridsearch_result_images(show_func,
                                         cfgdict_list,
                                         cfglbl_list,
                                         cfgresult_list,
                                         fnum=fnum,
                                         figtitle=lbl,
                                         unpack=False,
                                         max_plots=25,
                                         onclick_func=onclick_func)
    pt.iup()