Code example #1
File: rsync_ibeisdb.py Project: heroinlin/ibeis
def sync_ibeisdb(remote_uri, dbname, mode='pull', workdir=None, port=22, dryrun=False):
    """
    syncs an ibeisdb without syncing the cache or the chip directory
    (or the top level image directory because it shouldn't exist unless it is an
    old hots database)
    """
    print('[sync_ibeisdb] Syncing')
    print('  * dbname=%r ' % (dbname,))
    print('  * remote_uri=%r' % (remote_uri,))
    print('  * mode=%r' % (mode))
    import ibeis
    # Excluded temporary and cached data
    exclude_dirs = list(map(ut.ensure_unixslash, ibeis.const.EXCLUDE_COPY_REL_DIRS))
    # Specify local workdir
    if workdir is None:
        workdir = ibeis.sysres.get_workdir()
    local_uri = ut.ensure_unixslash(workdir)
    if ut.WIN32:
        # fix for mingw rsync
        local_uri = ut.ensure_mingw_drive(local_uri)
    if mode == 'pull':
        # pull remote to local
        remote_src = ut.unixjoin(remote_uri, dbname)
        ut.assert_exists(local_uri)
        rsync(remote_src, local_uri, exclude_dirs, port, dryrun=dryrun)
    elif mode == 'push':
        # push local to remote
        local_src = ut.unixjoin(local_uri, dbname)
        if not dryrun:
            ut.assert_exists(local_src)
        rsync(local_src, remote_uri, exclude_dirs, port, dryrun=dryrun)
        if dryrun:
            ut.assert_exists(local_src)
    else:
        raise AssertionError('unknown mode=%r' % (mode,))
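The `rsync` call above is presumably `ut.rsync` (the later `sync_wbiadb` variant qualifies it). That helper is not shown on this page; a minimal sketch of what such a wrapper could look like, built on `subprocess`, with a flag set and signature that are assumptions rather than the utool implementation:

import subprocess

def rsync(src_uri, dst_uri, exclude_dirs=(), port=22, dryrun=False):
    # Hypothetical stand-in for ut.rsync; the real flag set may differ.
    cmd = ['rsync', '-avhzP', '-e', 'ssh -p %d' % (port,)]
    if dryrun:
        cmd.append('--dry-run')
    for dname in exclude_dirs:
        cmd.extend(['--exclude', dname])
    cmd.extend([src_uri, dst_uri])
    print(' '.join(cmd))
    subprocess.check_call(cmd)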
Code example #2
File: depends_cache.py Project: Erotemic/ibeis
def dummy_manual_chipmask(depc, parent_rowids, config=None):
    import vtool as vt
    from plottool import interact_impaint
    mask_dpath = ut.unixjoin(depc.cache_dpath, 'ManualChipMask')
    ut.ensuredir(mask_dpath)
    if config is None:
        config = {}
    print('Requesting user defined chip mask')
    for rowid in parent_rowids:
        # gpath_list is not defined in this excerpt; it presumably comes
        # from the enclosing module scope.
        img = vt.imread(gpath_list[rowid])
        mask = interact_impaint.impaint_mask2(img)
        mask_fpath = ut.unixjoin(mask_dpath, 'mask%d.png' % (rowid,))
        vt.imwrite(mask_fpath, mask)
        w, h = vt.get_size(mask)
        yield (w, h), mask_fpath
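Since `dummy_manual_chipmask` is a generator, nothing is painted until it is iterated; a hypothetical driver (reusing `depc` and `gpath_list` as they exist in the excerpt) would drain it like this:

# Hypothetical usage; depc and gpath_list must exist as in the excerpt above.
for (w, h), mask_fpath in dummy_manual_chipmask(depc, parent_rowids=[1, 2]):
    print('wrote %dx%d mask to %s' % (w, h, mask_fpath))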
Code example #3
File: preproc_probchip.py Project: heroinlin/ibeis
def get_annot_probchip_fpath_list(ibs, aid_list, config2_=None, species=None):
    """ Build probability chip file paths based on the current IBEIS configuration

    Args:
        ibs (IBEISController):
        aid_list (list):
        config2_ (dict):
        species (None):

    Returns:
        probchip_fpath_list

    Example:
        >>> # ENABLE_DOCTEST
        >>> from ibeis.algo.preproc.preproc_probchip import *  # NOQA
        >>> from os.path import basename
        >>> ibs, aid_list = preproc_chip.testdata_ibeis()
        >>> config2_ = ibs.new_query_params(dict(fg_on=False))
        >>> probchip_fpath_list = get_annot_probchip_fpath_list(ibs, aid_list, config2_=config2_)
        >>> result = ut.relpath_unix(probchip_fpath_list[1], ibs.get_dbdir())
        >>> print(result)
        _ibsdb/_ibeis_cache/prob_chips/probchip_avuuid=5a1a53ba-fd44-b113-7f8c-fcf248d7047f_CHIP(sz450)_FEATWEIGHT(OFF).png

        _ibsdb/_ibeis_cache/prob_chips/probchip_avuuid=5a1a53ba-fd44-b113-7f8c-fcf248d7047f_CHIP(sz450)_FEATWEIGHT(ON,uselabel,rf).png

    probchip_aid=5_bbox=(0,0,1072,804)_theta=0.0tau_gid=5_CHIP(sz450)_FEATWEIGHT(ON,uselabel,rf)_CHIP().png
    """
    ibs.probchipdir = ibs.get_probchip_dir()
    cachedir = ibs.get_probchip_dir()
    ut.ensuredir(cachedir)
    probchip_fname_fmt = get_probchip_fname_fmt(ibs, config2_=config2_, species=species)
    annot_visual_uuid_list  = ibs.get_annot_visual_uuids(aid_list)
    probchip_fpath_list = [ut.unixjoin(cachedir, probchip_fname_fmt.format(avuuid=avuuid))
                           for avuuid in annot_visual_uuid_list]
    return probchip_fpath_list
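Nearly every snippet on this page routes path construction through `ut.unixjoin`. A minimal sketch of its assumed behavior: a join that keeps forward slashes on every platform (mirroring `posixpath.join`), so cached file paths stay stable across operating systems:

import posixpath

def unixjoin(*paths):
    # Assumed semantics of ut.unixjoin: posix-style joining even on Windows.
    return posixpath.join(*paths)

print(unixjoin('_ibsdb/_ibeis_cache', 'prob_chips', 'probchip_avuuid=xxxx.png'))
# _ibsdb/_ibeis_cache/prob_chips/probchip_avuuid=xxxx.png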
Code example #4
def _init_config(ibs):
    r"""
    Loads the database's algorithm configuration

    TODO: per-species config
    """
    #####
    # <GENERAL CONFIG>
    config_fpath = ut.unixjoin(ibs.get_dbdir(), 'general_config.cPkl')
    try:
        general_config = ut.load_cPkl(config_fpath, verbose=ut.VERBOSE)
    except IOError as ex:
        logger.error('*** failed to load general config', exc_info=ex)
        general_config = {}
        ut.save_cPkl(config_fpath, general_config, verbose=ut.VERBOSE)
    current_species = general_config.get('current_species', None)
    logger.info('[_init_config] general_config.current_species = %r' %
                (current_species, ))
    # </GENERAL CONFIG>
    #####
    # species_list = ibs.get_database_species()
    # if current_species is None:
    #     # species_list = ibs.get_database_species()
    #     # species_list[0] if len(species_list) == 1 else None
    #     primary_species = ibs.get_primary_database_species()
    #     current_species = primary_species
    cfgname = 'cfg' if current_species is None else current_species
    if ut.VERBOSE and ut.NOT_QUIET:
        # logger.info('[_init_config] Loading database with species_list = %r ' % (species_list,))
        logger.info('[_init_config] Using cfgname=%r' % (cfgname, ))
    # try to be intelligent about the default species
    ibs._load_named_config(cfgname)
Code example #5
File: tag_funcs.py Project: Erotemic/ibeis
def export_tagged_chips(ibs, aid_list, dpath='.'):
    """
    CommandLine:
        python -m ibeis.tag_funcs --exec-export_tagged_chips --tags Hard interesting needswork --db PZ_Master1
        python -m ibeis.tag_funcs --exec-export_tagged_chips --logic=or --any_startswith quality occlusion --has_any lighting needswork interesting hard --db GZ_Master1 --dpath=/media/raid
        python -m ibeis.tag_funcs --exec-export_tagged_chips --db GZ_Master1 --min_num=1  --dpath /media/raid

    Example:
        >>> # SCRIPT
        >>> from ibeis.tag_funcs import *  # NOQA
        >>> import ibeis
        >>> ibs = ibeis.opendb(defaultdb='testdb1')
        >>> kwargs = ut.argparse_dict(ut.get_kwdefaults2(filterflags_general_tags), type_hint=ut.ddict(list, logic=str))
        >>> ut.print_dict(kwargs, 'filter args')
        >>> aid_list = ibs.filter_annots_by_tags(**kwargs)
        >>> print('len(aid_list) = %r' % (len(aid_list),))
        >>> dpath = ut.get_argval('--dpath', default='')
        >>> all_tags = ut.flatten(ibs.get_annot_all_tags(aid_list))
        >>> filtered_tag_hist = ut.dict_hist(all_tags)
        >>> ut.print_dict(filtered_tag_hist, key_order_metric='val')
        >>> export_tagged_chips(ibs, aid_list, dpath)
    """
    visual_uuid_hashid = ibs.get_annot_hashid_visual_uuid(aid_list, _new=True)
    zip_fpath = ut.unixjoin(dpath, 'exported_chips2_' + ibs.get_dbname() +
                            visual_uuid_hashid + '.zip')
    chip_fpath = ibs.get_annot_chip_fpath(aid_list)
    ut.archive_files(zip_fpath, chip_fpath, common_prefix=True)
Code example #6
def export_tagged_chips(ibs, aid_list, dpath='.'):
    """
    DEPRECATE

    CommandLine:
        python -m wbia.tag_funcs --exec-export_tagged_chips --tags Hard interesting needswork --db PZ_Master1
        python -m wbia.tag_funcs --exec-export_tagged_chips --logic=or --any_startswith quality occlusion --has_any lighting needswork interesting hard --db GZ_Master1 --dpath=/media/raid
        python -m wbia.tag_funcs --exec-export_tagged_chips --db GZ_Master1 --min_num=1  --dpath /media/raid

    Example:
        >>> # SCRIPT
        >>> from wbia.tag_funcs import *  # NOQA
        >>> import wbia
        >>> ibs = wbia.opendb(defaultdb='testdb1')
        >>> kwargs = ut.argparse_dict(ut.get_kwdefaults2(filterflags_general_tags), type_hint=ut.ddict(list, logic=str))
        >>> ut.print_dict(kwargs, 'filter args')
        >>> aid_list = ibs.filter_annots_by_tags(**kwargs)
        >>> print('len(aid_list) = %r' % (len(aid_list),))
        >>> dpath = ut.get_argval('--dpath', default='')
        >>> all_tags = ut.flatten(ibs.get_annot_all_tags(aid_list))
        >>> filtered_tag_hist = ut.dict_hist(all_tags)
        >>> ut.print_dict(filtered_tag_hist, key_order_metric='val')
        >>> export_tagged_chips(ibs, aid_list, dpath)
    """
    visual_uuid_hashid = ibs.get_annot_hashid_visual_uuid(aid_list)
    zip_fpath = ut.unixjoin(
        dpath,
        'exported_chips2_' + ibs.get_dbname() + visual_uuid_hashid + '.zip')
    chip_fpath = ibs.get_annot_chip_fpath(aid_list)
    ut.archive_files(zip_fpath, chip_fpath, common_prefix=True)
Code example #7
File: manual_meta_funcs.py Project: warunanc/ibeis
def _init_config(ibs):
    r"""
    Loads the database's algorithm configuration

    TODO: per-species config
    """
    #####
    # <GENERAL CONFIG>
    config_fpath = ut.unixjoin(ibs.get_dbdir(), 'general_config.cPkl')
    try:
        general_config = ut.load_cPkl(config_fpath, verbose=ut.VERBOSE)
    except IOError as ex:
        if ut.VERBOSE:
            ut.printex(ex, 'failed to load general config', iswarning=True)
        general_config = {}
    current_species = general_config.get('current_species', None)
    if ut.VERBOSE and ut.NOT_QUIET:
        print('[_init_config] general_config.current_species = %r' %
              (current_species, ))
    # </GENERAL CONFIG>
    #####
    #species_list = ibs.get_database_species()
    if current_species is None:
        #species_list = ibs.get_database_species()
        #species_list[0] if len(species_list) == 1 else None
        primary_species = ibs.get_primary_database_species()
        current_species = primary_species
    cfgname = 'cfg' if current_species is None else current_species
    if ut.VERBOSE and ut.NOT_QUIET:
        #print('[_init_config] Loading database with species_list = %r ' % (species_list,))
        print('[_init_config] Using cfgname=%r' % (cfgname, ))
    # try to be intelligent about the default species
    ibs._load_named_config(cfgname)
Code example #8
File: old_chip_preproc.py Project: whaozl/ibeis
def make_annot_chip_fpath_list(ibs, aid_list, config2_=None):
    chipdir = ibs.get_chipdir()
    chip_uri_list = make_annot_chip_uri_list(ibs, aid_list, config2_=config2_)
    cfpath_list = [
        ut.unixjoin(chipdir, chip_uri) for chip_uri in chip_uri_list
    ]
    return cfpath_list
Code example #9
File: manual_meta_funcs.py Project: heroinlin/ibeis
def _init_config(ibs):
    r"""
    Loads the database's algorithm configuration

    TODO: per-species config
    """
    #####
    # <GENERAL CONFIG>
    config_fpath = ut.unixjoin(ibs.get_dbdir(), 'general_config.cPkl')
    try:
        general_config = ut.load_cPkl(config_fpath)
    except IOError:
        general_config = {}
    current_species = general_config.get('current_species', None)
    if ut.VERBOSE and ut.NOT_QUIET:
        print('[_init_config] general_config.current_species = %r' % (current_species,))
    # </GENERAL CONFIG>
    #####
    species_list = ibs.get_database_species()
    if current_species is None:
        species_list = ibs.get_database_species()
        current_species = species_list[0] if len(species_list) == 1 else None
    cfgname = 'cfg' if current_species is None else current_species
    if ut.VERBOSE and ut.NOT_QUIET:
        print('[_init_config] Loading database with species_list = %r ' % (species_list,))
        print('[_init_config] Using cfgname=%r' % (cfgname,))
    # try to be intelligent about the default species
    ibs._load_named_config(cfgname)
Code example #10
File: controller_inject.py Project: whaozl/ibeis
def dev_autogen_explicit_injects():
    r"""
    CommandLine:
        python -m ibeis --tf dev_autogen_explicit_injects

    Example:
        >>> # SCRIPT
        >>> from ibeis.control.controller_inject import *  # NOQA
        >>> dev_autogen_explicit_injects()
    """
    import ibeis  # NOQA
    classname = CONTROLLER_CLASSNAME
    regen_command = (
        'python -m ibeis.control.controller_inject '
        '--exec-dev_autogen_explicit_injects')
    import ibeis.control.IBEISControl
    conditional_imports = [
        modname for modname in ibeis.control.IBEISControl.AUTOLOAD_PLUGIN_MODNAMES
        if isinstance(modname, tuple)
    ]
    source_block = ut.autogen_explicit_injectable_metaclass(
        classname, regen_command, conditional_imports)
    dpath = ut.get_module_dir(ibeis.control.IBEISControl)
    fpath = ut.unixjoin(dpath, '_autogen_explicit_controller.py')
    ut.writeto(fpath, source_block)
Code example #11
def imwrite_theano_symbolic_graph(theano_expr):
    import theano
    graph_dpath = '.'
    graph_fname = 'symbolic_graph.png'
    graph_fpath = ut.unixjoin(graph_dpath, graph_fname)
    ut.ensuredir(graph_dpath)
    theano.printing.pydotprint(
        theano_expr, outfile=graph_fpath, var_with_name_simple=True)
    ut.startfile(graph_fpath)
    return graph_fpath
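A hypothetical call, assuming Theano is installed; `pydotprint` renders the graph of any symbolic expression:

import theano.tensor as T

# Illustrative expression; any Theano symbolic variable works here.
x = T.dmatrix('x')
y = T.nnet.sigmoid(T.dot(x, x.T))
imwrite_theano_symbolic_graph(y)  # writes and opens ./symbolic_graph.png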
Code example #12
def sync_wbiadb(remote_uri,
                dbname,
                mode='pull',
                workdir=None,
                port=22,
                dryrun=False):
    """
    syncs a wbiadb without syncing the cache or the chip directory
    (or the top level image directory because it shouldn't exist unless
    it is an old hots database)
    """
    logger.info('[sync_wbiadb] Syncing')
    logger.info('  * dbname=%r ' % (dbname, ))
    logger.info('  * remote_uri=%r' % (remote_uri, ))
    logger.info('  * mode=%r' % (mode))
    import wbia

    assert dbname is not None, 'must specify a database name'
    # Excluded temporary and cached data
    exclude_dirs = list(
        map(ut.ensure_unixslash, wbia.const.EXCLUDE_COPY_REL_DIRS))
    # Specify local workdir
    if workdir is None:
        workdir = wbia.sysres.get_workdir()
    local_uri = ut.ensure_unixslash(workdir)
    if ut.WIN32:
        # fix for mingw rsync
        local_uri = ut.ensure_mingw_drive(local_uri)
    if mode == 'pull':
        # pull remote to local
        remote_src = ut.unixjoin(remote_uri, dbname)
        ut.assert_exists(local_uri)
        ut.rsync(remote_src, local_uri, exclude_dirs, port, dryrun=dryrun)
    elif mode == 'push':
        # push local to remote
        local_src = ut.unixjoin(local_uri, dbname)
        if not dryrun:
            ut.assert_exists(local_src)
        ut.rsync(local_src, remote_uri, exclude_dirs, port, dryrun=dryrun)
        if dryrun:
            ut.assert_exists(local_src)
    else:
        raise AssertionError('unknown mode=%r' % (mode, ))
Code example #13
def ensure_model(model, redownload=False):
    try:
        url = MODEL_DOMAIN + MODEL_URLS[model]
        extracted_fpath = ut.grab_file_url(url, appname='ibeis_cnn',
                                           redownload=redownload,
                                           check_hash=True)
    except KeyError as ex:
        ut.printex(ex, 'model is not uploaded', iswarning=True)
        extracted_fpath = ut.unixjoin(ut.get_app_resource_dir('ibeis_cnn'), model)
        ut.assert_exists(extracted_fpath)
    return extracted_fpath
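`MODEL_DOMAIN` and `MODEL_URLS` are module-level constants not included in the excerpt. A plausible shape, with illustrative values only (the real registry lives in the ibeis_cnn source):

# Illustrative values, not the actual ibeis_cnn registry.
MODEL_DOMAIN = 'https://example.org/models/'
MODEL_URLS = {'detect_yolo': 'detect.yolo.12.weights.pkl'}
# ensure_model('detect_yolo') would then fetch
# https://example.org/models/detect.yolo.12.weights.pkl into the app cache,
# while an unknown key falls back to a preexisting local file.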
Code example #14
File: util_web.py Project: SU-ECE-18-7/utool
def render_html(html_str):
    """
    makes a temporary html rendering
    """
    import utool as ut
    from os.path import abspath
    import webbrowser

    html_dpath = ut.ensure_app_resource_dir('utool', 'temp_html')
    fpath = abspath(ut.unixjoin(html_dpath, 'temp.html'))
    url = 'file://' + fpath
    ut.writeto(fpath, html_str)
    webbrowser.open(url)
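For example, the following writes temp.html under the utool app resource directory and opens it in the default browser:

render_html('<h1>hello</h1><p>temporary render</p>')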
Code example #15
def reset_local_wildbook():
    r"""
    CommandLine:
        python -m ibeis.control.manual_wildbook_funcs --test-reset_local_wildbook

    Example:
        >>> # SCRIPT
        >>> from ibeis.control.manual_wildbook_funcs import *  # NOQA
        >>> reset_local_wildbook()
    """
    import utool as ut
    try:
        shutdown_wildbook_server()
    except ImportError:
        pass
    ut.delete(ut.unixjoin(ut.get_app_resource_dir('ibeis'), 'tomcat'))
Code example #16
def vim_grep_project(pat, hashid=None):
    import vim
    import utool as ut
    ut.ENABLE_COLORS = False
    ut.util_str.ENABLE_COLORS = False
    if hashid is None:
        hashid = ut.hashstr27(pat)
    print('Grepping for pattern = %r' % (pat,))
    msg_list = ut.grep_projects([pat], verbose=False, colored=False)
    fname = 'tmp_grep_' + hashid + '.txt'
    dpath = ut.get_app_resource_dir('utool')
    fpath = ut.unixjoin(dpath, fname)
    #pyvim_funcs.vim_fpath_cmd('split', fpath)
    vim_fpath_cmd('new', fpath)
    text = '\n'.join(msg_list)
    overwrite_text(text)
    vim.command(":exec ':w'")
Code example #17
File: depends_cache.py Project: Erotemic/ibeis
    def initialize(depc):
        print('[depc] INITIALIZE DEPCACHE')

        if depc._use_globals:
            print(' * registering %d global preproc funcs' % (len(__PREPROC_REGISTER__),))
            for args_, kwargs_ in __PREPROC_REGISTER__:
                depc._register_prop(*args_, **kwargs_)

        ut.ensuredir(depc.cache_dpath)
        #print('depc.cache_dpath = %r' % (depc.cache_dpath,))
        config_addtable_kw = ut.odict(
            [
                ('tablename', CONFIG_TABLE,),
                ('coldef_list', [
                    (CONFIG_ROWID, 'INTEGER PRIMARY KEY'),
                    (CONFIG_HASHID, 'TEXT'),
                ],),
                ('docstr', 'table for algo configurations'),
                ('superkeys', [(CONFIG_HASHID,)]),
                ('dependson', [])
            ]
        )
        #print(ut.repr3(config_addtable_kw))

        #print('depc.fname_to_db.keys = %r' % (depc.fname_to_db,))
        for fname in depc.fname_to_db.keys():
            #print('fname = %r' % (fname,))
            if fname == ':memory:':
                fpath = fname
            else:
                fname_ = ut.ensure_ext(fname, '.sqlite')
                fpath = ut.unixjoin(depc.cache_dpath, fname_)
            #print('fpath = %r' % (fpath,))
            db = SQLDatabaseController(fpath=fpath, simple=True)
            if not db.has_table(CONFIG_TABLE):
                db.add_table(**config_addtable_kw)
            depc.fname_to_db[fname] = db
        print('[depc] Finished initialization')

        for table in depc.cachetable_dict.values():
            table.initialize()
Code example #18
File: wildbook_manager.py Project: Erotemic/ibeis
def purge_local_wildbook():
    r"""
    Shuts down the server and then purges the server on disk

    CommandLine:
        python -m ibeis purge_local_wildbook
        python -m ibeis purge_local_wildbook --purge-war

    Example:
        >>> # SCRIPT
        >>> from ibeis.control.wildbook_manager import *  # NOQA
        >>> purge_local_wildbook()
    """
    try:
        shutdown_wildbook_server()
    except ImportError:
        pass
    ut.delete(ut.unixjoin(ut.get_app_resource_dir('ibeis'), 'tomcat'))
    if ut.get_argflag('--purge-war'):
        war_fpath = find_or_download_wilbook_warfile(ensure=False)
        ut.delete(war_fpath)
Code example #19
    def dump_to_disk(self, dpath, num=None, prefix='temp_img'):
        import numpy as np
        import wbia.plottool as pt

        dpath = ut.ensurepath(dpath)
        num_zeros = int(np.ceil(np.log10(len(self.gpath_list))))  # int so the %0Nd width is well-formed
        total = len(self.gpath_list)
        if num is None:
            num = total
        fmtstr = prefix + '_%0' + str(num_zeros) + 'd.jpg'
        fig = pt.figure(fnum=self.fnum)
        for index in ut.ProgIter(range(num), lbl='dumping images to disk'):
            fig = pt.figure(fnum=self.fnum)
            fig.clf()
            ax = self._plot_index(index, {'fnum': self.fnum})
            fig = ax.figure
            axes_extents = pt.extract_axes_extents(fig)
            assert len(axes_extents) == 1, 'more than one axes'
            extent = axes_extents[0]
            fpath = ut.unixjoin(dpath, fmtstr % (index))
            fig.savefig(fpath, bbox_inches=extent)
        pt.plt.close(fig)
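The zero-padding width is derived from the gallery size, e.g. a 120-image gallery pads indices to three digits:

import numpy as np

num_zeros = int(np.ceil(np.log10(120)))                 # 3
print(('temp_img_%0' + str(num_zeros) + 'd.jpg') % 7)   # temp_img_007.jpg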
Code example #20
File: manual_chip_funcs.py Project: heroinlin/ibeis
def get_chip_fpath(ibs, cid_list, check_external_storage=False):
    r"""
    Combines the uri with the expected chip directory.
    config2_ is only needed if check_external_storage=True

    Returns:
        chip_fpath_list (list): a list of chip paths by their aid

    RESTful:
        Method: GET
        URL:    /api/chip/fpath/
    """
    if check_external_storage:
        chip_fpath_list = check_chip_external_storage(ibs, cid_list)
    else:
        chip_uri_list = ibs.get_chip_uris(cid_list)
        chipdir = ibs.get_chipdir()
        chip_fpath_list = [
            None if chip_uri is None else ut.unixjoin(chipdir, chip_uri)
            for chip_uri in chip_uri_list
        ]
    return chip_fpath_list
Code example #21
File: preproc_probchip.py Project: whaozl/ibeis
def get_annot_probchip_fpath_list(ibs, aid_list, config2_=None, species=None):
    """ Build probability chip file paths based on the current IBEIS configuration

    Args:
        ibs (IBEISController):
        aid_list (list):
        config2_ (dict):
        species (None):

    Returns:
        probchip_fpath_list

    Example:
        >>> # ENABLE_DOCTEST
        >>> from ibeis.algo.preproc.preproc_probchip import *  # NOQA
        >>> from os.path import basename
        >>> ibs, aid_list = preproc_chip.testdata_ibeis()
        >>> config2_ = ibs.new_query_params(dict(fg_on=False))
        >>> probchip_fpath_list = get_annot_probchip_fpath_list(ibs, aid_list, config2_=config2_)
        >>> result = ut.relpath_unix(probchip_fpath_list[1], ibs.get_dbdir())
        >>> print(result)
        _ibsdb/_ibeis_cache/prob_chips/probchip_avuuid=5a1a53ba-fd44-b113-7f8c-fcf248d7047f_CHIP(sz450)_FEATWEIGHT(OFF).png

        _ibsdb/_ibeis_cache/prob_chips/probchip_avuuid=5a1a53ba-fd44-b113-7f8c-fcf248d7047f_CHIP(sz450)_FEATWEIGHT(ON,uselabel,rf).png

    probchip_aid=5_bbox=(0,0,1072,804)_theta=0.0tau_gid=5_CHIP(sz450)_FEATWEIGHT(ON,uselabel,rf)_CHIP().png
    """
    ibs.probchipdir = ibs.get_probchip_dir()
    cachedir = ibs.get_probchip_dir()
    ut.ensuredir(cachedir)
    probchip_fname_fmt = get_probchip_fname_fmt(ibs,
                                                config2_=config2_,
                                                species=species)
    annot_visual_uuid_list = ibs.get_annot_visual_uuids(aid_list)
    probchip_fpath_list = [
        ut.unixjoin(cachedir, probchip_fname_fmt.format(avuuid=avuuid))
        for avuuid in annot_visual_uuid_list
    ]
    return probchip_fpath_list
Code example #22
    # NOTE: this excerpt begins mid-function, inside make_setup(repodir);
    # setup_text_fmt and timestamp are presumably defined earlier in the file.
    pkgname = basename(repodir)
    packages = utool.ls_moduledirs(repodir, full=False)
    print(pkgname)
    setup_text = setup_text_fmt.format(
        packages=packages,
        repodir=repodir,
        timestamp=timestamp,
        pkgname=pkgname,
    )
    return setup_text


if __name__ == '__main__':
    writeflag = utool.get_argflag(('--write', '-w'))
    overwriteflag = utool.get_argflag(('--yes', '-y'))
    repodir = utool.unixpath(os.getcwd())
    print('[utool] making setup.py for: %r' % repodir)
    setup_text = make_setup(repodir)
    if writeflag:
        setup_fpath = utool.unixjoin(repodir, 'setup.py')
        if utool.checkpath(setup_fpath):
            confirm_flag = overwriteflag
        else:
            confirm_flag = True
        if confirm_flag:
            utool.write_to(setup_fpath, setup_text)
        else:
            print('setup.py file exists; not writing')
    else:
        print(setup_text)
Code example #23
def write_default_ipython_profile():
    """
    CommandLine:
        python ~/local/init/init_ipython_config.py

        python -c "import utool as ut; ut.vd(ut.unixpath('~/.ipython/profile_default'))"
        python -c "import utool as ut; ut.editfile(ut.unixpath('~/.ipython/profile_default/ipython_config.py'))"

    References:
        http://2sn.org/python/ipython_config.py
    """
    dpath = ut.unixpath('~/.ipython/profile_default')
    ut.ensuredir(dpath, info=True, verbose=True)
    ipy_config_fpath = ut.unixjoin(dpath, 'ipython_config.py')
    ipy_config_text = ut.codeblock(
        r'''
        # STARTBLOCK
        c = get_config()  # NOQA
        c.InteractiveShellApp.exec_lines = []
        future_line = (
            'from __future__ import absolute_import, division, print_function, with_statement, unicode_literals')
        c.InteractiveShellApp.exec_lines.append(future_line)
        # Fix sip versions
        try:
            import sip
            # http://stackoverflow.com/questions/21217399/pyqt4-qtcore-qvariant-object-instead-of-a-string
            sip.setapi('QVariant', 2)
            sip.setapi('QString', 2)
            sip.setapi('QTextStream', 2)
            sip.setapi('QTime', 2)
            sip.setapi('QUrl', 2)
            sip.setapi('QDate', 2)
            sip.setapi('QDateTime', 2)
            if hasattr(sip, 'setdestroyonexit'):
                sip.setdestroyonexit(False)  # This prevents a crash on windows
        except ImportError as ex:
            pass
        except ValueError as ex:
            print('Warning: Value Error: %s' % str(ex))
            pass
        c.InteractiveShellApp.exec_lines.append('%load_ext autoreload')
        c.InteractiveShellApp.exec_lines.append('%autoreload 2')
        #c.InteractiveShellApp.exec_lines.append('%pylab qt4')
        c.InteractiveShellApp.exec_lines.append('import numpy as np')
        c.InteractiveShellApp.exec_lines.append('import utool as ut')
        #c.InteractiveShellApp.exec_lines.append('import plottool as pt')
        c.InteractiveShellApp.exec_lines.append('from os.path import *')
        c.InteractiveShellApp.exec_lines.append('from six.moves import cPickle as pickle')
        #c.InteractiveShellApp.exec_lines.append('if \'verbose\' not in vars():\\n    verbose = True')
        import utool as ut
        c.InteractiveShellApp.exec_lines.append(ut.codeblock(
            """
            class classproperty(property):
                def __get__(self, cls, owner):
                    return classmethod(self.fget).__get__(None, owner)()
            class vim(object):
                @classproperty
                def focus(cls):
                    import utool.util_ubuntu
                    utool.util_ubuntu.xctrl.do(('focus', 'GVIM'),)
                @classproperty
                def copy(cls):
                    import utool.util_ubuntu
                    utool.util_ubuntu.xctrl.do(('focus', 'GVIM'),)
                    import utool as ut
                    import IPython
                    ipy = IPython.get_ipython()
                    lastline = ipy.history_manager.input_hist_parsed[-2]
                    ut.copy_text_to_clipboard(lastline)
                    # import utool as ut
                    import utool.util_ubuntu
                    utool.util_ubuntu.xctrl.do(
                        ('focus', 'GVIM'),
                        ('key', 'ctrl+v'),
                        ('focus', 'x-terminal-emulator.X-terminal-emulator')
                    )
            """
        ))
        #c.InteractiveShell.autoindent = True
        #c.InteractiveShell.colors = 'LightBG'
        #c.InteractiveShell.confirm_exit = False
        #c.InteractiveShell.deep_reload = True
        c.InteractiveShell.editor = 'gvim'
        #c.InteractiveShell.xmode = 'Context'
        # ENDBLOCK
        '''
    )
    ut.write_to(ipy_config_fpath, ipy_config_text)
Code example #24
    def __init__(
        qres_wgt,
        ibs,
        cm_list,
        parent=None,
        callback=None,
        qreq_=None,
        query_title='',
        review_cfg={},
    ):
        if ut.VERBOSE:
            logger.info('[qres_wgt] Init QueryResultsWidget')

        assert not isinstance(cm_list, dict)
        assert qreq_ is not None, 'must specify qreq_'

        if USE_FILTER_PROXY:
            super(QueryResultsWidget,
                  qres_wgt).__init__(parent=parent,
                                     model_class=CustomFilterModel)
        else:
            super(QueryResultsWidget, qres_wgt).__init__(parent=parent)

        # if USE_FILTER_PROXY:
        #    APIItemWidget.__init__(qres_wgt, parent=parent,
        #                            model_class=CustomFilterModel)
        # else:
        #    APIItemWidget.__init__(qres_wgt, parent=parent)

        qres_wgt.cm_list = cm_list
        qres_wgt.ibs = ibs
        qres_wgt.qreq_ = qreq_
        qres_wgt.query_title = query_title
        qres_wgt.qaid2_cm = dict([(cm.qaid, cm) for cm in cm_list])

        qres_wgt.review_cfg = id_review_api.REVIEW_CFG_DEFAULTS.copy()
        qres_wgt.review_cfg = ut.update_existing(qres_wgt.review_cfg,
                                                 review_cfg,
                                                 assert_exists=True)

        # qres_wgt.altkey_shortcut =
        # QtWidgets.QShortcut(QtGui.QKeySequence(QtCore.Qt.ALT), qres_wgt,
        #                qres_wgt.on_alt_pressed,
        #                context=QtCore..Qt.WidgetShortcut)
        qres_wgt.button_list = None
        qres_wgt.show_new = True
        qres_wgt.show_join = True
        qres_wgt.show_split = True
        qres_wgt.tt = ut.tic()
        # Set results data
        if USE_FILTER_PROXY:
            qres_wgt.add_checkboxes(qres_wgt.show_new, qres_wgt.show_join,
                                    qres_wgt.show_split)

        lbl = gt.newLineEdit(
            qres_wgt,
            text=
            "'T' marks as correct match. 'F' marks as incorrect match. Alt brings up context menu. Double click a row to inspect matches.",
            editable=False,
            enabled=False,
        )
        qres_wgt.layout().setSpacing(0)
        qres_wgt_layout = qres_wgt.layout()
        if hasattr(qres_wgt_layout, 'setMargin'):
            qres_wgt_layout.setMargin(0)
        else:
            qres_wgt_layout.setContentsMargins(0, 0, 0, 0)
        bottom_bar = gt.newWidget(qres_wgt,
                                  orientation=Qt.Horizontal,
                                  spacing=0,
                                  margin=0)
        bottom_bar.layout().setSpacing(0)
        bottom_bar_layout = bottom_bar.layout()
        if hasattr(bottom_bar_layout, 'setMargin'):
            bottom_bar_layout.setMargin(0)
        else:
            bottom_bar_layout.setContentsMargins(0, 0, 0, 0)
        lbl.setMinimumSize(0, 0)
        lbl.setSizePolicy(QtWidgets.QSizePolicy.Expanding,
                          QtWidgets.QSizePolicy.Ignored)
        # lbl.setSizePolicy(gt.newSizePolicy())

        qres_wgt.layout().addWidget(bottom_bar)
        bottom_bar.addWidget(lbl)
        bottom_bar.addNewButton(
            'Mark unreviewed with higher scores as correct',
            pressed=qres_wgt.mark_unreviewed_above_score_as_correct,
        )
        bottom_bar.addNewButton('Repopulate', pressed=qres_wgt.repopulate)
        bottom_bar.addNewButton('Edit Filters', pressed=qres_wgt.edit_filters)

        qres_wgt.setSizePolicy(gt.newSizePolicy())
        qres_wgt.repopulate()
        qres_wgt.connect_signals_and_slots()
        if callback is None:
            callback = partial(ut.identity, None)
        qres_wgt.callback = callback
        qres_wgt.view.setColumnHidden(0, False)
        qres_wgt.view.setColumnHidden(1, False)
        qres_wgt.view.connect_single_key_to_slot(gt.ALT_KEY,
                                                 qres_wgt.on_alt_pressed)
        qres_wgt.view.connect_keypress_to_slot(qres_wgt.on_special_key_pressed)
        if parent is None:
            # Register parentless QWidgets
            fig_presenter.register_qt4_win(qres_wgt)

        dbdir = qres_wgt.qreq_.ibs.get_dbdir()
        expt_dir = ut.ensuredir(ut.unixjoin(dbdir, 'SPECIAL_GGR_EXPT_LOGS'))
        review_log_dir = ut.ensuredir(ut.unixjoin(expt_dir, 'review_logs'))

        ts = ut.get_timestamp(isutc=True, timezone=True)
        log_fpath = ut.unixjoin(
            review_log_dir,
            'review_log_%s_%s.json' % (qres_wgt.qreq_.ibs.dbname, ts))

        # LOG ALL CHANGES MADE TO NAMES
        import logging

        # ut.vd(review_log_dir)
        # create logger with 'spam_application'
        logger_ = logging.getLogger('query_review')
        logger_.setLevel(logging.DEBUG)
        formatter = logging.Formatter(
            '%(asctime)s - %(name)s - %(levelname)s - %(message)s')
        # create file handler which logs even debug messages
        fh = logging.FileHandler(log_fpath)
        fh.setLevel(logging.DEBUG)
        fh.setFormatter(formatter)
        logger_.addHandler(fh)

        # create console handler with a higher log level
        ch = logging.StreamHandler()
        ch.setLevel(logging.INFO)
        ch.setFormatter(formatter)
        logger_.addHandler(ch)

        qres_wgt.logger = logger_  # the file-backed review logger configured above
        logger_.info('START QUERY_RESULT_REVIEW')
        logger_.info('NUM CHIP_MATCH OBJECTS (len(cm_list)=%d)' %
                     (len(cm_list), ))
        logger_.info('NUM PAIRS TO EVIDENCE_DECISION (nRows=%d)' %
                     (qres_wgt.review_api.nRows, ))
        logger_.info('PARENT QUERY REQUEST (cfgstr=%s)' %
                     (qres_wgt.qreq_.get_cfgstr(with_input=True), ))
Code example #25
File: experiments.py Project: simplesoftMX/ibeis_cnn
def test_siamese_performance(model, data, labels, flat_metadata, dataname=''):
    r"""
    CommandLine:
        utprof.py -m ibeis_cnn --tf pz_patchmatch --db liberty --test --weights=liberty:current --arch=siaml2_128 --test
        python -m ibeis_cnn --tf netrun --db liberty --arch=siaml2_128 --test  --ensure
        python -m ibeis_cnn --tf netrun --db liberty --arch=siaml2_128 --test  --ensure --weights=new
        python -m ibeis_cnn --tf netrun --db liberty --arch=siaml2_128 --train --weights=new
        python -m ibeis_cnn --tf netrun --db pzmtest --weights=liberty:current --arch=siaml2_128 --test  # NOQA
        python -m ibeis_cnn --tf netrun --db pzmtest --weights=liberty:current --arch=siaml2_128
    """
    import vtool as vt
    import plottool as pt

    # TODO: save in model.trainind_dpath/diagnostics/figures
    ut.colorprint('\n[siam_perf] Testing Siamese Performance', 'white')
    #epoch_dpath = model.get_epoch_diagnostic_dpath()
    epoch_dpath = model.arch_dpath
    ut.vd(epoch_dpath)

    dataname += ' ' + model.get_history_hashid() + '\n'

    history_text = ut.list_str(model.era_history, newlines=True)

    ut.write_to(ut.unixjoin(epoch_dpath, 'era_history.txt'), history_text)

    #if True:
    #    import matplotlib as mpl
    #    mpl.rcParams['agg.path.chunksize'] = 100000

    #data   = data[::50]
    #labels = labels[::50]
    #from ibeis_cnn import utils
    #data, labels = utils.random_xy_sample(data, labels, 10000, model.data_per_label_input)

    FULL = not ut.get_argflag('--quick')

    fnum_gen = pt.make_fnum_nextgen()

    ut.colorprint('[siam_perf] Show era history', 'white')
    fig = model.show_era_loss(fnum=fnum_gen())
    pt.save_figure(fig=fig, dpath=epoch_dpath, dpi=180)

    # hack
    ut.colorprint('[siam_perf] Show weights image', 'white')
    fig = model.show_weights_image(fnum=fnum_gen())
    pt.save_figure(fig=fig, dpath=epoch_dpath, dpi=180)
    #model.draw_all_conv_layer_weights(fnum=fnum_gen())
    #model.imwrite_weights(1)
    #model.imwrite_weights(2)

    # Compute each type of score
    ut.colorprint('[siam_perf] Building Scores', 'white')
    test_outputs = model.predict2(model, data)
    network_output = test_outputs['network_output_determ']
    # hack converting network output to distances for non-descriptor networks
    if len(network_output.shape) == 2 and network_output.shape[1] == 1:
        cnn_scores = network_output.T[0]
    elif len(network_output.shape) == 1:
        cnn_scores = network_output
    elif len(network_output.shape) == 2 and network_output.shape[1] > 1:
        assert model.data_per_label_output == 2
        vecs1 = network_output[0::2]
        vecs2 = network_output[1::2]
        cnn_scores = vt.L2(vecs1, vecs2)
    else:
        assert False
    cnn_scores = cnn_scores.astype(np.float64)

    # Segfaults when the data passed in is large (AND MEMMAPPED apparently)
    # Fixed in hesaff implementation
    SIFT = FULL
    if SIFT:
        sift_scores, sift_list = test_sift_patchmatch_scores(data, labels)
        sift_scores = sift_scores.astype(np.float64)

    ut.colorprint('[siam_perf] Learning Encoders', 'white')
    # Learn encoders
    encoder_kw = {
        #'monotonize': False,
        'monotonize': True,
    }
    cnn_encoder = vt.ScoreNormalizer(**encoder_kw)
    cnn_encoder.fit(cnn_scores, labels)

    if SIFT:
        sift_encoder = vt.ScoreNormalizer(**encoder_kw)
        sift_encoder.fit(sift_scores, labels)

    # Visualize
    ut.colorprint('[siam_perf] Visualize Encoders', 'white')
    viz_kw = dict(
        with_scores=False,
        with_postbayes=False,
        with_prebayes=False,
        target_tpr=.95,
    )
    inter_cnn = cnn_encoder.visualize(
        figtitle=dataname + ' CNN scores. #data=' + str(len(data)),
        fnum=fnum_gen(), **viz_kw)
    if SIFT:
        inter_sift = sift_encoder.visualize(
            figtitle=dataname + ' SIFT scores. #data=' + str(len(data)),
            fnum=fnum_gen(), **viz_kw)

    # Save
    pt.save_figure(fig=inter_cnn.fig, dpath=epoch_dpath)
    if SIFT:
        pt.save_figure(fig=inter_sift.fig, dpath=epoch_dpath)

    # Save out examples of hard errors
    #cnn_fp_label_indicies, cnn_fn_label_indicies =
    #cnn_encoder.get_error_indicies(cnn_scores, labels)
    #sift_fp_label_indicies, sift_fn_label_indicies =
    #sift_encoder.get_error_indicies(sift_scores, labels)

    with_patch_examples = FULL
    if with_patch_examples:
        ut.colorprint('[siam_perf] Visualize Confusion Examples', 'white')
        cnn_indicies = cnn_encoder.get_confusion_indicies(cnn_scores, labels)
        if SIFT:
            sift_indicies = sift_encoder.get_confusion_indicies(sift_scores, labels)

        warped_patch1_list, warped_patch2_list = list(zip(*ut.ichunks(data, 2)))
        samp_args = (warped_patch1_list, warped_patch2_list, labels)
        _sample = functools.partial(draw_results.get_patch_sample_img, *samp_args)

        cnn_fp_img = _sample({'fs': cnn_scores}, cnn_indicies.fp)[0]
        cnn_fn_img = _sample({'fs': cnn_scores}, cnn_indicies.fn)[0]
        cnn_tp_img = _sample({'fs': cnn_scores}, cnn_indicies.tp)[0]
        cnn_tn_img = _sample({'fs': cnn_scores}, cnn_indicies.tn)[0]

        if SIFT:
            sift_fp_img = _sample({'fs': sift_scores}, sift_indicies.fp)[0]
            sift_fn_img = _sample({'fs': sift_scores}, sift_indicies.fn)[0]
            sift_tp_img = _sample({'fs': sift_scores}, sift_indicies.tp)[0]
            sift_tn_img = _sample({'fs': sift_scores}, sift_indicies.tn)[0]

        #if ut.show_was_requested():
        #def rectify(arr):
        #    return np.flipud(arr)
        SINGLE_FIG = False
        if SINGLE_FIG:
            def dump_img(img_, lbl, fnum):
                fig, ax = pt.imshow(img_, figtitle=dataname + ' ' + lbl, fnum=fnum)
                pt.save_figure(fig=fig, dpath=epoch_dpath, dpi=180)
            dump_img(cnn_fp_img, 'cnn_fp_img', fnum_gen())
            dump_img(cnn_fn_img, 'cnn_fn_img', fnum_gen())
            dump_img(cnn_tp_img, 'cnn_tp_img', fnum_gen())
            dump_img(cnn_tn_img, 'cnn_tn_img', fnum_gen())

            dump_img(sift_fp_img, 'sift_fp_img', fnum_gen())
            dump_img(sift_fn_img, 'sift_fn_img', fnum_gen())
            dump_img(sift_tp_img, 'sift_tp_img', fnum_gen())
            dump_img(sift_tn_img, 'sift_tn_img', fnum_gen())
            #vt.imwrite(dataname + '_' + 'cnn_fp_img.png', (cnn_fp_img))
            #vt.imwrite(dataname + '_' + 'cnn_fn_img.png', (cnn_fn_img))
            #vt.imwrite(dataname + '_' + 'sift_fp_img.png', (sift_fp_img))
            #vt.imwrite(dataname + '_' + 'sift_fn_img.png', (sift_fn_img))
        else:
            print('Drawing TP FP TN FN')
            fnum = fnum_gen()
            pnum_gen = pt.make_pnum_nextgen(4, 2)
            fig = pt.figure(fnum)
            pt.imshow(cnn_fp_img,  title='CNN FP',  fnum=fnum, pnum=pnum_gen())
            pt.imshow(sift_fp_img, title='SIFT FP', fnum=fnum, pnum=pnum_gen())
            pt.imshow(cnn_fn_img,  title='CNN FN',  fnum=fnum, pnum=pnum_gen())
            pt.imshow(sift_fn_img, title='SIFT FN', fnum=fnum, pnum=pnum_gen())
            pt.imshow(cnn_tp_img,  title='CNN TP',  fnum=fnum, pnum=pnum_gen())
            pt.imshow(sift_tp_img, title='SIFT TP', fnum=fnum, pnum=pnum_gen())
            pt.imshow(cnn_tn_img,  title='CNN TN',  fnum=fnum, pnum=pnum_gen())
            pt.imshow(sift_tn_img, title='SIFT TN', fnum=fnum, pnum=pnum_gen())
            pt.set_figtitle(dataname + ' confusions')
            pt.adjust_subplots(left=0, right=1.0, bottom=0., wspace=.01, hspace=.05)
            pt.save_figure(fig=fig, dpath=epoch_dpath, dpi=180, figsize=(9, 18))

    with_patch_desc = FULL
    if with_patch_desc:
        ut.colorprint('[siam_perf] Visualize Patch Descriptors', 'white')
        fnum = fnum_gen()
        fig = pt.figure(fnum=fnum, pnum=(1, 1, 1))
        num_rows = 7
        pnum_gen = pt.make_pnum_nextgen(num_rows, 3)
        # Compare actual output descriptors
        for index in ut.random_indexes(len(sift_list), num_rows):
            vec_sift = sift_list[index]
            vec_cnn = network_output[index]
            patch = data[index]
            pt.imshow(patch, fnum=fnum, pnum=pnum_gen())
            pt.plot_descriptor_signature(vec_cnn, 'cnn vec',  fnum=fnum, pnum=pnum_gen())
            pt.plot_sift_signature(vec_sift, 'sift vec',  fnum=fnum, pnum=pnum_gen())
        pt.set_figtitle('Patch Descriptors')
        pt.adjust_subplots(left=0, right=0.95, bottom=0., wspace=.1, hspace=.15)
        pt.save_figure(fig=fig, dpath=epoch_dpath, dpi=180, figsize=(9, 18))
Code example #26
def make_single_testres(
    ibs,
    qaids,
    daids,
    pipecfg_list,
    cfgx2_lbl,
    cfgdict_list,
    lbl,
    testnameid,
    use_cache=None,
    subindexer_partial=ut.ProgIter,
):
    """
    CommandLine:
        python -m wbia run_expt
    """
    cfgslice = None
    if cfgslice is not None:
        pipecfg_list = pipecfg_list[cfgslice]

    dbname = ibs.get_dbname()

    # if ut.NOT_QUIET:
    #     logger.info('[harn] Make single testres')

    cfgx2_qreq_ = [
        ibs.new_query_request(qaids, daids, verbose=False, query_cfg=pipe_cfg)
        for pipe_cfg in ut.ProgIter(
            pipecfg_list, lbl='Building qreq_', enabled=False)
    ]

    if use_cache is None:
        use_cache = USE_BIG_TEST_CACHE

    if use_cache:
        try:
            bt_cachedir = ut.ensuredir(
                (ibs.get_cachedir(), 'BULK_TEST_CACHE2'))
            cfgstr_list = [
                qreq_.get_cfgstr(with_input=True) for qreq_ in cfgx2_qreq_
            ]
            bt_cachestr = ut.hashstr_arr27(cfgstr_list,
                                           ibs.get_dbname() + '_cfgs')
            bt_cachename = 'BULKTESTCACHE2_v2'
            testres = ut.load_cache(bt_cachedir, bt_cachename, bt_cachestr)
            testres.cfgdict_list = cfgdict_list
            testres.cfgx2_lbl = cfgx2_lbl  # hack override
        except IOError:
            pass
        else:
            if ut.NOT_QUIET:
                ut.colorprint('[harn] single testres cache hit... returning',
                              'brightcyan')
            return testres

    if ibs.table_cache:
        # HACK
        prev_feat_cfgstr = None

    cfgx2_cmsinfo = []
    cfgiter = subindexer_partial(range(len(cfgx2_qreq_)),
                                 lbl='pipe config',
                                 freq=1,
                                 adjust=False)
    # Run each pipeline configuration
    for cfgx in cfgiter:
        qreq_ = cfgx2_qreq_[cfgx]
        cprint = ut.colorprint
        cprint('testnameid=%r' % (testnameid, ), 'green')
        cprint(
            'annot_cfgstr = %s' %
            (qreq_.get_cfgstr(with_input=True, with_pipe=False), ),
            'yellow',
        )
        cprint('pipe_cfgstr= %s' % (qreq_.get_cfgstr(with_data=False), ),
               'brightcyan')
        cprint('pipe_hashstr = %s' % (qreq_.get_pipe_hashid(), ), 'cyan')
        if DRY_RUN:
            continue

        indent_prefix = '[%s cfg %d/%d]' % (
            dbname,
            # cfgiter.count (doesn't work when quiet)
            (cfgiter.parent_index * cfgiter.length) + cfgx,
            cfgiter.length * cfgiter.parent_length,
        )

        with ut.Indenter(indent_prefix):
            # Run the test / read cache
            _need_compute = True
            if use_cache:
                # smaller cache for individual configuration runs
                st_cfgstr = qreq_.get_cfgstr(with_input=True)
                st_cachedir = ut.unixjoin(bt_cachedir, 'small_tests')
                st_cachename = 'smalltest'
                ut.ensuredir(st_cachedir)
                try:
                    cmsinfo = ut.load_cache(st_cachedir, st_cachename,
                                            st_cfgstr)
                except IOError:
                    _need_compute = True
                else:
                    _need_compute = False
            if _need_compute:
                assert not ibs.table_cache
                if ibs.table_cache:
                    if (prev_feat_cfgstr is not None
                            and prev_feat_cfgstr != qreq_.qparams.feat_cfgstr):
                        # Clear features to preserve memory
                        ibs.clear_table_cache()
                        # qreq_.ibs.print_cachestats_str()
                cm_list = qreq_.execute()
                cmsinfo = test_result.build_cmsinfo(cm_list, qreq_)
                # record previous feature configuration
                if ibs.table_cache:
                    prev_feat_cfgstr = qreq_.qparams.feat_cfgstr
                if use_cache:
                    ut.save_cache(st_cachedir, st_cachename, st_cfgstr,
                                  cmsinfo)
        if not NOMEMORY:
            # Store the results
            cfgx2_cmsinfo.append(cmsinfo)
        else:
            cfgx2_qreq_[cfgx] = None
    if ut.NOT_QUIET:
        ut.colorprint('[harn] Completed running test configurations', 'white')
    if DRY_RUN:
        logger.info('ran tests dryrun mode.')
        return
    if NOMEMORY:
        logger.info('ran tests in memory savings mode. Cannot Print. exiting')
        return
    # Store all pipeline config results in a test result object
    testres = test_result.TestResult(pipecfg_list, cfgx2_lbl, cfgx2_cmsinfo,
                                     cfgx2_qreq_)
    testres.testnameid = testnameid
    testres.lbl = lbl
    testres.cfgdict_list = cfgdict_list
    testres.aidcfg = None
    if use_cache:
        try:
            ut.save_cache(bt_cachedir, bt_cachename, bt_cachestr, testres)
        except Exception as ex:
            ut.printex(ex, 'error saving testres cache', iswarning=True)
            if ut.SUPER_STRICT:
                raise
    return testres
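Both cache layers above use the same idiom: try `ut.load_cache`, treat `IOError` as a miss, compute, then `ut.save_cache`. Stripped of the harness, the pattern is (a sketch, assuming the utool signatures used above):

import utool as ut

def cached_compute(cachedir, fname, cfgstr, compute_fn):
    # A cache miss surfaces as IOError; anything loaded is returned as-is.
    try:
        data = ut.load_cache(cachedir, fname, cfgstr)
    except IOError:
        data = compute_fn()
        ut.save_cache(cachedir, fname, cfgstr, data)
    return data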
Code example #27
def run_asmk_script():
    with ut.embed_on_exception_context:  # NOQA
        """
        >>> from wbia.algo.smk.script_smk import *
        """

        # ==============================================
        # PREPROCESSING CONFIGURATION
        # ==============================================
        config = {
            # 'data_year': 2013,
            'data_year': None,
            'dtype': 'float32',
            # 'root_sift': True,
            'root_sift': False,
            # 'centering': True,
            'centering': False,
            'num_words': 2**16,
            # 'num_words': 1E6
            # 'num_words': 8000,
            'kmeans_impl': 'sklearn.mini',
            'extern_words': False,
            'extern_assign': False,
            'assign_algo': 'kdtree',
            'checks': 1024,
            'int_rvec': True,
            'only_xy': False,
        }
        # Define which params are relevant for which operations
        relevance = {}
        relevance['feats'] = ['dtype', 'root_sift', 'centering', 'data_year']
        relevance['words'] = relevance['feats'] + [
            'num_words',
            'extern_words',
            'kmeans_impl',
        ]
        relevance['assign'] = relevance['words'] + [
            'checks',
            'extern_assign',
            'assign_algo',
        ]
        # relevance['ydata'] = relevance['assign'] + ['int_rvec']
        # relevance['xdata'] = relevance['assign'] + ['only_xy', 'int_rvec']

        nAssign = 1

        class SMKCacher(ut.Cacher):
            def __init__(self, fname, ext='.cPkl'):
                relevant_params = relevance[fname]
                relevant_cfg = ut.dict_subset(config, relevant_params)
                cfgstr = ut.get_cfg_lbl(relevant_cfg)
                dbdir = ut.truepath('/raid/work/Oxford/')
                super(SMKCacher, self).__init__(fname,
                                                cfgstr,
                                                cache_dir=dbdir,
                                                ext=ext)

        # ==============================================
        # LOAD DATASET, EXTRACT AND POSTPROCESS FEATURES
        # ==============================================
        if config['data_year'] == 2007:
            data = load_oxford_2007()
        elif config['data_year'] == 2013:
            data = load_oxford_2013()
        elif config['data_year'] is None:
            data = load_oxford_wbia()

        offset_list = data['offset_list']
        all_kpts = data['all_kpts']
        raw_vecs = data['all_vecs']
        query_uri_order = data['query_uri_order']
        data_uri_order = data['data_uri_order']
        # del data

        # ================
        # PRE-PROCESS
        # ================
        import vtool as vt

        # Alias names to avoid errors in interactive sessions
        proc_vecs = raw_vecs
        del raw_vecs

        feats_cacher = SMKCacher('feats', ext='.npy')
        all_vecs = feats_cacher.tryload()
        if all_vecs is None:
            if config['dtype'] == 'float32':
                logger.info('Converting vecs to float32')
                proc_vecs = proc_vecs.astype(np.float32)
            else:
                proc_vecs = proc_vecs
                raise NotImplementedError('other dtype')

            if config['root_sift']:
                with ut.Timer('Apply root sift'):
                    np.sqrt(proc_vecs, out=proc_vecs)
                    vt.normalize(proc_vecs, ord=2, axis=1, out=proc_vecs)

            if config['centering']:
                with ut.Timer('Apply centering'):
                    mean_vec = np.mean(proc_vecs, axis=0)
                    # Center and then re-normalize
                    np.subtract(proc_vecs, mean_vec[None, :], out=proc_vecs)
                    vt.normalize(proc_vecs, ord=2, axis=1, out=proc_vecs)

            if config['dtype'] == 'int8':
                smk_funcs  # no-op placeholder; an int8 path is not implemented here

            all_vecs = proc_vecs
            feats_cacher.save(all_vecs)
        del proc_vecs

        # =====================================
        # BUILD VISUAL VOCABULARY
        # =====================================
        if config['extern_words']:
            words = data['words']
            assert config['num_words'] is None or len(
                words) == config['num_words']
        else:
            word_cacher = SMKCacher('words')
            words = word_cacher.tryload()
            if words is None:
                with ut.embed_on_exception_context:
                    if config['kmeans_impl'] == 'sklearn.mini':
                        import sklearn.cluster

                        rng = np.random.RandomState(13421421)
                        # init_size = int(config['num_words'] * 8)
                        init_size = int(config['num_words'] * 4)
                        # converged after 26043 iterations
                        clusterer = sklearn.cluster.MiniBatchKMeans(
                            config['num_words'],
                            init_size=init_size,
                            batch_size=1000,
                            compute_labels=False,
                            max_iter=20,
                            random_state=rng,
                            n_init=1,
                            verbose=1,
                        )
                        clusterer.fit(all_vecs)
                        words = clusterer.cluster_centers_
                    elif config['kmeans_impl'] == 'yael':
                        from yael import ynumpy

                        centroids, qerr, dis, assign, nassign = ynumpy.kmeans(
                            all_vecs,
                            config['num_words'],
                            init='kmeans++',
                            verbose=True,
                            output='all',
                        )
                        words = centroids
                    word_cacher.save(words)

        # =====================================
        # ASSIGN EACH VECTOR TO ITS NEAREST WORD
        # =====================================
        if config['extern_assign']:
            assert config[
                'extern_words'], 'need extern cluster to extern assign'
            idx_to_wxs = vt.atleast_nd(data['idx_to_wx'], 2)
            idx_to_maws = np.ones(idx_to_wxs.shape, dtype=np.float32)
            idx_to_wxs = np.ma.array(idx_to_wxs)
            idx_to_maws = np.ma.array(idx_to_maws)
        else:
            from wbia.algo.smk import vocab_indexer

            vocab = vocab_indexer.VisualVocab(words)
            dassign_cacher = SMKCacher('assign')
            assign_tup = dassign_cacher.tryload()
            if assign_tup is None:
                vocab.flann_params['algorithm'] = config['assign_algo']
                vocab.build()
                # Takes 12 minutes to assign jegous vecs to 2**16 vocab
                with ut.Timer('assign vocab neighbors'):
                    _idx_to_wx, _idx_to_wdist = vocab.nn_index(
                        all_vecs, nAssign, checks=config['checks'])
                    if nAssign > 1:
                        idx_to_wxs, idx_to_maws = smk_funcs.weight_multi_assigns(
                            _idx_to_wx,
                            _idx_to_wdist,
                            massign_alpha=1.2,
                            massign_sigma=80.0,
                            massign_equal_weights=True,
                        )
                    else:
                        idx_to_wxs = np.ma.masked_array(_idx_to_wx,
                                                        fill_value=-1)
                        idx_to_maws = np.ma.ones(idx_to_wxs.shape,
                                                 fill_value=-1,
                                                 dtype=np.float32)
                        idx_to_maws.mask = idx_to_wxs.mask
                assign_tup = (idx_to_wxs, idx_to_maws)
                dassign_cacher.save(assign_tup)

        idx_to_wxs, idx_to_maws = assign_tup

        # Breakup vectors, keypoints, and word assignments by annotation
        wx_lists = [
            idx_to_wxs[left:right] for left, right in ut.itertwo(offset_list)
        ]
        maw_lists = [
            idx_to_maws[left:right] for left, right in ut.itertwo(offset_list)
        ]
        vecs_list = [
            all_vecs[left:right] for left, right in ut.itertwo(offset_list)
        ]
        kpts_list = [
            all_kpts[left:right] for left, right in ut.itertwo(offset_list)
        ]

        # =======================
        # FIND QUERY SUBREGIONS
        # =======================

        ibs, query_annots, data_annots, qx_to_dx = load_ordered_annots(
            data_uri_order, query_uri_order)
        daids = data_annots.aids
        qaids = query_annots.aids

        query_super_kpts = ut.take(kpts_list, qx_to_dx)
        query_super_vecs = ut.take(vecs_list, qx_to_dx)
        query_super_wxs = ut.take(wx_lists, qx_to_dx)
        query_super_maws = ut.take(maw_lists, qx_to_dx)
        # Mark which keypoints are within the bbox of the query
        query_flags_list = []
        only_xy = config['only_xy']
        for kpts_, bbox in zip(query_super_kpts, query_annots.bboxes):
            flags = kpts_inside_bbox(kpts_, bbox, only_xy=only_xy)
            query_flags_list.append(flags)

        logger.info('Queries are crops of existing database images.')
        logger.info('Looking at average percents')
        percent_list = [
            flags_.sum() / flags_.shape[0] for flags_ in query_flags_list
        ]
        percent_stats = ut.get_stats(percent_list)
        logger.info('percent_stats = %s' % (ut.repr4(percent_stats), ))

        import vtool as vt

        query_kpts = vt.zipcompress(query_super_kpts, query_flags_list, axis=0)
        query_vecs = vt.zipcompress(query_super_vecs, query_flags_list, axis=0)
        query_wxs = vt.zipcompress(query_super_wxs, query_flags_list, axis=0)
        query_maws = vt.zipcompress(query_super_maws, query_flags_list, axis=0)

        # =======================
        # CONSTRUCT QUERY / DATABASE REPR
        # =======================

        # int_rvec = not config['dtype'].startswith('float')
        int_rvec = config['int_rvec']

        X_list = []
        _prog = ut.ProgPartial(length=len(qaids),
                               label='new X',
                               bs=True,
                               adjust=True)
        for aid, fx_to_wxs, fx_to_maws in _prog(
                zip(qaids, query_wxs, query_maws)):
            X = new_external_annot(aid, fx_to_wxs, fx_to_maws, int_rvec)
            X_list.append(X)

        # ydata_cacher = SMKCacher('ydata')
        # Y_list = ydata_cacher.tryload()
        # if Y_list is None:
        Y_list = []
        _prog = ut.ProgPartial(length=len(daids),
                               label='new Y',
                               bs=True,
                               adjust=True)
        for aid, fx_to_wxs, fx_to_maws in _prog(zip(daids, wx_lists,
                                                    maw_lists)):
            Y = new_external_annot(aid, fx_to_wxs, fx_to_maws, int_rvec)
            Y_list.append(Y)
        # ydata_cacher.save(Y_list)

        # ======================
        # Add in some groundtruth

        logger.info('Add in some groundtruth')
        for Y, nid in zip(Y_list, ibs.get_annot_nids(daids)):
            Y.nid = nid

        for X, nid in zip(X_list, ibs.get_annot_nids(qaids)):
            X.nid = nid

        for Y, qual in zip(Y_list, ibs.get_annot_quality_texts(daids)):
            Y.qual = qual

        # ======================
        # Add in other properties
        for Y, vecs, kpts in zip(Y_list, vecs_list, kpts_list):
            Y.vecs = vecs
            Y.kpts = kpts

        imgdir = ut.truepath('/raid/work/Oxford/oxbuild_images')
        for Y, imgid in zip(Y_list, data_uri_order):
            gpath = ut.unixjoin(imgdir, imgid + '.jpg')
            Y.gpath = gpath

        for X, vecs, kpts in zip(X_list, query_vecs, query_kpts):
            X.kpts = kpts
            X.vecs = vecs

        # ======================
        logger.info('Building inverted list')
        daids = [Y.aid for Y in Y_list]
        # wx_list = sorted(ut.list_union(*[Y.wx_list for Y in Y_list]))
        wx_list = sorted(set.union(*[Y.wx_set for Y in Y_list]))
        assert daids == data_annots.aids
        assert len(wx_list) <= config['num_words']

        wx_to_aids = smk_funcs.invert_lists(daids, [Y.wx_list for Y in Y_list],
                                            all_wxs=wx_list)

        # Compute IDF weights
        logger.info('Compute IDF weights')
        ndocs_total = len(daids)
        # Use only the unique number of words
        ndocs_per_word = np.array([len(set(wx_to_aids[wx])) for wx in wx_list])
        logger.info('ndocs_perword stats: ' +
                    ut.repr4(ut.get_stats(ndocs_per_word)))
        idf_per_word = smk_funcs.inv_doc_freq(ndocs_total, ndocs_per_word)
        wx_to_weight = dict(zip(wx_list, idf_per_word))
        logger.info('idf stats: ' +
                    ut.repr4(ut.get_stats(wx_to_weight.values())))
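        # (sketch) A standard inverse document frequency here would be
        # idf(w) = ln(ndocs_total / ndocs_per_word[w]); the exact smoothing
        # used by smk_funcs.inv_doc_freq may differ.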

        # Filter junk
        Y_list_ = [Y for Y in Y_list if Y.qual != 'junk']

        # =======================
        # CHOOSE QUERY KERNEL
        # =======================
        params = {
            'asmk': dict(alpha=3.0, thresh=0.0),
            'bow': dict(),
            'bow2': dict(),
        }
        # method = 'bow'
        # method = 'bow2'
        method = 'asmk'
        smk = SMK(wx_to_weight, method=method, **params[method])
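        # (assumption) In the ASMK formulation, alpha is the selectivity
        # exponent and thresh the similarity cutoff: a match similarity u
        # contributes sign(u) * |u|**alpha only when u > thresh.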

        # Specific info for the type of query
        if method == 'asmk':
            # Make residual vectors
            if True:
                # The stacked way is 50x faster
                # TODO: extend for multi-assignment and record fxs
                flat_query_vecs = np.vstack(query_vecs)
                flat_query_wxs = np.vstack(query_wxs)
                flat_query_offsets = np.array(
                    [0] + ut.cumsum(ut.lmap(len, query_wxs)))

                flat_wxs_assign = flat_query_wxs
                flat_offsets = flat_query_offsets
                flat_vecs = flat_query_vecs
                tup = smk_funcs.compute_stacked_agg_rvecs(
                    words, flat_wxs_assign, flat_vecs, flat_offsets)
                all_agg_vecs, all_error_flags, agg_offset_list = tup
                if int_rvec:
                    all_agg_vecs = smk_funcs.cast_residual_integer(
                        all_agg_vecs)
                agg_rvecs_list = [
                    all_agg_vecs[left:right]
                    for left, right in ut.itertwo(agg_offset_list)
                ]
                agg_flags_list = [
                    all_error_flags[left:right]
                    for left, right in ut.itertwo(agg_offset_list)
                ]

                for X, agg_rvecs, agg_flags in zip(X_list, agg_rvecs_list,
                                                   agg_flags_list):
                    X.agg_rvecs = agg_rvecs
                    X.agg_flags = agg_flags[:, None]

                flat_wxs_assign = idx_to_wxs
                flat_offsets = offset_list
                flat_vecs = all_vecs
                tup = smk_funcs.compute_stacked_agg_rvecs(
                    words, flat_wxs_assign, flat_vecs, flat_offsets)
                all_agg_vecs, all_error_flags, agg_offset_list = tup
                if int_rvec:
                    all_agg_vecs = smk_funcs.cast_residual_integer(
                        all_agg_vecs)

                agg_rvecs_list = [
                    all_agg_vecs[left:right]
                    for left, right in ut.itertwo(agg_offset_list)
                ]
                agg_flags_list = [
                    all_error_flags[left:right]
                    for left, right in ut.itertwo(agg_offset_list)
                ]

                for Y, agg_rvecs, agg_flags in zip(Y_list, agg_rvecs_list,
                                                   agg_flags_list):
                    Y.agg_rvecs = agg_rvecs
                    Y.agg_flags = agg_flags[:, None]
            else:
                # This non-stacked way is about 500x slower
                _prog = ut.ProgPartial(label='agg Y rvecs',
                                       bs=True,
                                       adjust=True)
                for Y in _prog(Y_list_):
                    make_agg_vecs(Y, words, Y.vecs)

                _prog = ut.ProgPartial(label='agg X rvecs',
                                       bs=True,
                                       adjust=True)
                for X in _prog(X_list):
                    make_agg_vecs(X, words, X.vecs)
        elif method == 'bow2':
            # Hack for orig tf-idf bow vector
            nwords = len(words)
            for X in ut.ProgIter(X_list, label='make bow vector'):
                ensure_tf(X)
                bow_vector(X, wx_to_weight, nwords)

            for Y in ut.ProgIter(Y_list_, label='make bow vector'):
                ensure_tf(Y)
                bow_vector(Y, wx_to_weight, nwords)

        if method != 'bow2':
            for X in ut.ProgIter(X_list, 'compute X gamma'):
                X.gamma = smk.gamma(X)
            for Y in ut.ProgIter(Y_list_, 'compute Y gamma'):
                Y.gamma = smk.gamma(Y)

        # Execute matches (could go faster by enumerating candidates)
        scores_list = []
        for X in ut.ProgIter(X_list, label='query %s' % (smk, )):
            scores = [smk.kernel(X, Y) for Y in Y_list_]
            scores = np.array(scores)
            scores = np.nan_to_num(scores)
            scores_list.append(scores)

        import sklearn.metrics

        avep_list = []
        _iter = list(zip(scores_list, X_list))
        _iter = ut.ProgIter(_iter, label='evaluate %s' % (smk, ))
        for scores, X in _iter:
            truth = [X.nid == Y.nid for Y in Y_list_]
            avep = sklearn.metrics.average_precision_score(truth, scores)
            avep_list.append(avep)
        avep_list = np.array(avep_list)
        mAP = np.mean(avep_list)
        logger.info('mAP  = %r' % (mAP, ))
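
A minimal self-contained sketch of the evaluation step above: per-query average precision scores averaged into mAP. Only numpy and sklearn are assumed; the data here is illustrative, not from the script.

import numpy as np
import sklearn.metrics

# one score row and one boolean ground-truth row per query
scores_list = [np.array([0.9, 0.2, 0.4]), np.array([0.1, 0.8, 0.3])]
truth_list = [[True, False, False], [False, True, True]]

avep_list = [
    sklearn.metrics.average_precision_score(truth, scores)
    for truth, scores in zip(truth_list, scores_list)
]
mAP = np.mean(avep_list)
print('mAP = %r' % (mAP,))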
Code Example #29
0
File: util_class.py Project: Erotemic/utool
def test_reloading_metaclass():
    r"""
    CommandLine:
        python -m utool.util_class --test-test_reloading_metaclass

    References:
        http://stackoverflow.com/questions/8122734/pythons-imp-reload-function-is-not-working

    Example:
        >>> # ENABLE_DOCTEST
        >>> from utool.util_class import *  # NOQA
        >>> result = test_reloading_metaclass()
        >>> print(result)
    """
    import utool as ut
    testdir = ut.ensure_app_resource_dir('utool', 'metaclass_tests')
    testfoo_fpath = ut.unixjoin(testdir, 'testfoo.py')
    # os.chdir(testdir)
    #with ut.ChdirContext(testdir, stay=ut.inIPython()):
    with ut.ChdirContext(testdir):
        foo_code1 = ut.codeblock(
            r'''
            # STARTBLOCK
            import utool as ut
            import six


            @six.add_metaclass(ut.ReloadingMetaclass)
            class Foo(object):
                def __init__(self):
                    pass

            spamattr = 'version1'
            # ENDBLOCK
            '''
        )
        foo_code2 = ut.codeblock(
            r'''
            # STARTBLOCK
            import utool as ut
            import six


            @six.add_metaclass(ut.ReloadingMetaclass)
            class Foo(object):
                def __init__(self):
                    pass

                def bar(self):
                    return 'spam'

            eggsattr = 'version2'
            # ENDBLOCK
            '''
        )
        # Write a testclass to disk
        ut.delete(testfoo_fpath)
        ut.write_to(testfoo_fpath, foo_code1, verbose=True)
        testfoo = ut.import_module_from_fpath(testfoo_fpath)
        #import testfoo
        foo = testfoo.Foo()
        print('foo = %r' % (foo,))
        assert not hasattr(foo, 'bar'), 'foo should not have a bar attr'
        ut.delete(testfoo_fpath + 'c')  # remove the pyc file because of the identical creation time
        ut.write_to(testfoo_fpath, foo_code2, verbose=True)
        assert not hasattr(foo, 'bar'), 'foo should still not have a bar attr'
        foo.rrr()
        assert foo.bar() == 'spam'
        ut.delete(testfoo_fpath)
        print('Reloading worked nicely')
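
The test above relies on ut.ReloadingMetaclass to rebind methods on live instances after a module reload. A minimal sketch of the underlying idea using only the standard library (the module name foo_mod and its contents are illustrative):

import importlib
import os
import sys
import tempfile

dpath = tempfile.mkdtemp()
sys.path.insert(0, dpath)
fpath = os.path.join(dpath, 'foo_mod.py')

with open(fpath, 'w') as f:
    f.write('class Foo(object):\n    def bar(self):\n        return "v1"\n')

import foo_mod
foo = foo_mod.Foo()
assert foo.bar() == 'v1'

with open(fpath, 'w') as f:
    f.write('class Foo(object):\n    def bar(self):\n        return "v2"\n')

importlib.invalidate_caches()
importlib.reload(foo_mod)
# a plain reload leaves existing instances bound to the old class;
# rebinding __class__ is the step ReloadingMetaclass automates
foo.__class__ = foo_mod.Foo
assert foo.bar() == 'v2'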
Code Example #30
0
File: id_review_api.py Project: simplesoftMX/ibeis
def ensure_match_img(ibs, cm, daid, qreq_=None, match_thumbtup_cache={}):
    r"""
    CommandLine:
        python -m ibeis.gui.id_review_api --test-ensure_match_img --show

    Example:
        >>> # ENABLE_DOCTEST
        >>> from ibeis.gui.id_review_api import *  # NOQA
        >>> import ibeis
        >>> # build test data
        >>> cm, qreq_ = ibeis.testdata_cm()
        >>> daid = cm.get_top_aids()[0]
        >>> match_thumbtup_cache = {}
        >>> # execute function
        >>> match_thumb_fpath_ = ensure_match_img(qreq_.ibs, cm, daid, qreq_,
        >>>                                       match_thumbtup_cache)
        >>> # verify results
        >>> result = str(match_thumb_fpath_)
        >>> print(result)
        >>> ut.quit_if_noshow()
        >>> ut.startfile(match_thumb_fpath_, quote=True)
    """
    #from os.path import exists
    match_thumbdir = ibs.get_match_thumbdir()
    match_thumb_fname = get_match_thumb_fname(cm, daid, qreq_)
    match_thumb_fpath_ = ut.unixjoin(match_thumbdir, match_thumb_fname)
    #if exists(match_thumb_fpath_):
    #    return match_thumb_fpath_
    if match_thumb_fpath_ in match_thumbtup_cache:
        fpath = match_thumbtup_cache[match_thumb_fpath_]
    else:
        # TODO: just draw the image at the correct thumbnail size
        # TODO: draw without matplotlib?
        #with ut.Timer('render-1'):
        fpath = cm.imwrite_single_annotmatch(
            qreq_, daid, fpath=match_thumb_fpath_, saveax=True, fnum=32,
            notitle=True, verbose=False)
        #with ut.Timer('render-2'):
        #    img = cm.render_single_annotmatch(qreq_, daid, fnum=32, notitle=True, dpi=30)
        #    cv2.imwrite(match_thumb_fpath_, img)
        #    fpath = match_thumb_fpath_
        #with ut.Timer('render-3'):
        #fpath = match_thumb_fpath_
        #render_config = {
        #    'dpi'              : 60,
        #    'draw_fmatches'    : True,
        #    #'vert'             : view_orientation == 'vertical',
        #    'show_aidstr'      : False,
        #    'show_name'        : False,
        #    'show_exemplar'    : False,
        #    'show_num_gt'      : False,
        #    'show_timedelta'   : False,
        #    'show_name_rank'   : False,
        #    'show_score'       : False,
        #    'show_annot_score' : False,
        #    'show_name_score'  : False,
        #    'draw_lbl'         : False,
        #    'draw_border'      : False,
        #}
        #cm.imwrite_single_annotmatch2(qreq_, daid, fpath, fnum=32, notitle=True, **render_config)
        #print('fpath = %r' % (fpath,))
        match_thumbtup_cache[match_thumb_fpath_] = fpath
    return fpath
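
Note the match_thumbtup_cache={} default above: the mutable default argument persists across calls, so it deliberately doubles as a per-process cache. The idiom in isolation (a sketch; expensive() is a hypothetical stand-in for the render):

def expensive(x, _cache={}):
    # the same dict object is reused on every call
    if x not in _cache:
        _cache[x] = x * x  # stand-in for a slow rendering step
    return _cache[x]

assert expensive(3) == 9
assert expensive(3) == 9  # second call is a cache hit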
Code Example #32
0
File: id_review_api.py Project: simplesoftMX/ibeis
def make_ensure_match_img_nosql_func(qreq_, cm, daid):
    r"""
    CommandLine:
        python -m ibeis.gui.id_review_api --test-ensure_match_img --show

    Example:
        >>> # ENABLE_DOCTEST
        >>> from ibeis.gui.id_review_api import *  # NOQA
        >>> import ibeis
        >>> # build test data
        >>> cm, qreq_ = ibeis.testdata_cm()
        >>> ibs = qreq_.ibs
        >>> daid = cm.get_top_aids()[0]
        >>> match_thumbtup_cache = {}
        >>> # execute function
        >>> match_thumb_fpath_ = ensure_match_img(qreq_.ibs, cm, daid, qreq_, match_thumbtup_cache)
        >>> # verify results
        >>> result = str(match_thumb_fpath_)
        >>> print(result)
        >>> ut.quit_if_noshow()
        >>> ut.startfile(match_thumb_fpath_, quote=True)
    """
    #import ibeis.viz
    from ibeis.viz import viz_matches
    import cv2
    import io
    import plottool as pt
    import vtool as vt
    import matplotlib as mpl

    if cm.__class__.__name__ == 'PairwiseMatch':
        # HACK DO THIS THE VTOOL WAY
        match = cm
        ibs = qreq_  # VERY HACK
        match_thumbdir = ibs.get_match_thumbdir()
        cfgstr = hash(match.config)  # HACK only works if config is already a hashdict
        match_thumb_fname = 'tmpmatch-%d-%d-%s.jpg' % (match.annot1['aid'], match.annot2['aid'], cfgstr)
        fpath = ut.unixjoin(match_thumbdir, match_thumb_fname)

        def main_thread_load2():
            rchip1, kpts1 = ut.dict_take(match.annot1, ['rchip', 'kpts'])
            rchip2, kpts2 = ut.dict_take(match.annot2, ['rchip', 'kpts'])
            return (match,)

        def nosql_draw2(check_func, match):
            from matplotlib.backends.backend_agg import FigureCanvas
            try:
                from matplotlib.backends.backend_agg import Figure
            except ImportError:
                from matplotlib.figure import Figure

            was_interactive = mpl.is_interactive()
            if was_interactive:
                mpl.interactive(False)
            #fnum = 32
            fig = Figure()
            canvas = FigureCanvas(fig)  # NOQA
            #fig.clf()
            ax = fig.add_subplot(1, 1, 1)
            if check_func is not None and check_func():
                return
            ax, xywh1, xywh2 = match.show(ax=ax)
            if check_func is not None and check_func():
                return
            savekw = {
                # 'dpi' : 60,
                'dpi' : 80,
            }
            axes_extents = pt.extract_axes_extents(fig)
            #assert len(axes_extents) == 1, 'more than one axes'
            extent = axes_extents[0]
            with io.BytesIO() as stream:
                # This call takes 23% - 15% of the time depending on settings
                fig.savefig(stream, bbox_inches=extent, **savekw)
                stream.seek(0)
                data = np.frombuffer(stream.getvalue(), dtype=np.uint8)
            if check_func is not None and check_func():
                return
            pt.plt.close(fig)
            image = cv2.imdecode(data, 1)
            thumbsize = 221
            max_dsize = (thumbsize, thumbsize)
            dsize, sx, sy = vt.resized_clamped_thumb_dims(vt.get_size(image), max_dsize)
            if check_func is not None and check_func():
                return
            image = vt.resize(image, dsize)
            vt.imwrite(fpath, image)
            if check_func is not None and check_func():
                return
            #fig.savefig(fpath, bbox_inches=extent, **savekw)
        #match_thumbtup_cache[match_thumb_fpath_] = fpath
        return fpath, nosql_draw2, main_thread_load2

    aid1 = cm.qaid
    aid2 = daid

    ibs = qreq_.ibs
    resize_factor = .5

    match_thumbdir = ibs.get_match_thumbdir()
    match_thumb_fname = get_match_thumb_fname(cm, daid, qreq_)
    fpath = ut.unixjoin(match_thumbdir, match_thumb_fname)

    def main_thread_load():
        # This gets executed in the main thread and collects data
        # from sql
        rchip1_fpath, rchip2_fpath, kpts1, kpts2 = viz_matches._get_annot_pair_info(
            ibs, aid1, aid2, qreq_, draw_fmatches=True, as_fpath=True)
        return rchip1_fpath, rchip2_fpath, kpts1, kpts2

    def nosql_draw(check_func, rchip1_fpath, rchip2_fpath, kpts1, kpts2):
        # This gets executed in the child thread and does drawing async style
        #from matplotlib.backends.backend_pdf import FigureCanvasPdf as FigureCanvas
        #from matplotlib.backends.backend_pdf import Figure
        #from matplotlib.backends.backend_svg import FigureCanvas
        #from matplotlib.backends.backend_svg import Figure
        from matplotlib.backends.backend_agg import FigureCanvas
        try:
            from matplotlib.backends.backend_agg import Figure
        except ImportError:
            from matplotlib.figure import Figure

        kpts1_ = vt.offset_kpts(kpts1, (0, 0), (resize_factor, resize_factor))
        kpts2_ = vt.offset_kpts(kpts2, (0, 0), (resize_factor, resize_factor))

        #from matplotlib.figure import Figure
        if check_func is not None and check_func():
            return

        rchip1 = vt.imread(rchip1_fpath)
        rchip1 = vt.resize_image_by_scale(rchip1, resize_factor)
        if check_func is not None and check_func():
            return
        rchip2 = vt.imread(rchip2_fpath)
        rchip2 = vt.resize_image_by_scale(rchip2, resize_factor)
        if check_func is not None and check_func():
            return

        try:
            idx = cm.daid2_idx[daid]
            fm   = cm.fm_list[idx]
            fsv  = None if cm.fsv_list is None else cm.fsv_list[idx]
            fs   = None if fsv is None else fsv.prod(axis=1)
        except KeyError:
            fm = []
            fs = None
            fsv = None

        maxnum = 200
        if fs is not None and len(fs) > maxnum:
            # HACK TO ONLY SHOW TOP MATCHES
            sortx = fs.argsort()[::-1]
            fm = fm.take(sortx[:maxnum], axis=0)
            fs = fs.take(sortx[:maxnum], axis=0)

        was_interactive = mpl.is_interactive()
        if was_interactive:
            mpl.interactive(False)
        #fnum = 32
        fig = Figure()
        canvas = FigureCanvas(fig)  # NOQA
        #fig.clf()
        ax = fig.add_subplot(1, 1, 1)
        if check_func is not None and check_func():
            return
        #fig = pt.plt.figure(fnum)
        #H1 = np.eye(3)
        #H2 = np.eye(3)
        #H1[0, 0] = .5
        #H1[1, 1] = .5
        #H2[0, 0] = .5
        #H2[1, 1] = .5
        ax, xywh1, xywh2 = pt.show_chipmatch2(rchip1, rchip2, kpts1_, kpts2_, fm,
                                              fs=fs, colorbar_=False, ax=ax)
        if check_func is not None and check_func():
            return
        savekw = {
            # 'dpi' : 60,
            'dpi' : 80,
        }
        axes_extents = pt.extract_axes_extents(fig)
        #assert len(axes_extents) == 1, 'more than one axes'
        extent = axes_extents[0]
        with io.BytesIO() as stream:
            # This call takes 23% - 15% of the time depending on settings
            fig.savefig(stream, bbox_inches=extent, **savekw)
            stream.seek(0)
            data = np.frombuffer(stream.getvalue(), dtype=np.uint8)
        if check_func is not None and check_func():
            return
        pt.plt.close(fig)
        image = cv2.imdecode(data, 1)
        thumbsize = 221
        max_dsize = (thumbsize, thumbsize)
        dsize, sx, sy = vt.resized_clamped_thumb_dims(vt.get_size(image), max_dsize)
        if check_func is not None and check_func():
            return
        image = vt.resize(image, dsize)
        vt.imwrite(fpath, image)
        if check_func is not None and check_func():
            return
        #fig.savefig(fpath, bbox_inches=extent, **savekw)
    #match_thumbtup_cache[match_thumb_fpath_] = fpath
    return fpath, nosql_draw, main_thread_load
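
A condensed sketch of the figure-to-OpenCV round trip that nosql_draw performs: render with the Agg backend into an in-memory PNG, then decode with cv2. matplotlib and cv2 are assumed installed; the plotted content is illustrative.

import io
import numpy as np
import cv2
from matplotlib.figure import Figure
from matplotlib.backends.backend_agg import FigureCanvasAgg

fig = Figure()
FigureCanvasAgg(fig)  # attach an Agg canvas so savefig can render
ax = fig.add_subplot(1, 1, 1)
ax.plot([0, 1], [0, 1])
with io.BytesIO() as stream:
    fig.savefig(stream, format='png')
    data = np.frombuffer(stream.getvalue(), dtype=np.uint8)
image = cv2.imdecode(data, 1)  # BGR uint8 array, ready for resizing etc.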
Code Example #33
0
File: depends_cache.py Project: Erotemic/ibeis
    def get_col(table, tbl_rowids, colnames=None):
        """
        colnames = ('mask', 'size')

        FIXME; unpacking is confusing with sql controller
        """
        # print('Get prop of %r, colnames=%r' % (table, colnames))
        try:
            request_unpack = False
            if colnames is None:
                colnames = table.data_colnames
                #table._internal_data_colnames
            else:
                if isinstance(colnames, six.text_type):
                    request_unpack = True
                    colnames = (colnames,)
            # print('* colnames = %r' % (colnames,))

            eager = True
            nInput = None

            total = 0
            intern_colnames = []
            extern_resolve_colxs = []
            nesting_xs = []

            for c in colnames:
                if c in table.external_to_internal:
                    intern_colnames.append([table.external_to_internal[c]])
                    read_func = table.extern_read_funcs[c]
                    extern_resolve_colxs.append((total, read_func))
                    nesting_xs.append(total)
                    total += 1
                elif c in table.nested_to_flat:
                    nest = table.nested_to_flat[c]
                    nesting_xs.append(list(range(total, total + len(nest))))
                    intern_colnames.append(nest)
                    total += len(nest)
                else:
                    nesting_xs.append(total)
                    intern_colnames.append([c])
                    total += 1

            flat_intern_colnames = tuple(ut.flatten(intern_colnames))

            # do sql read
            # FIXME: understand unpack_scalars and keepwrap
            raw_prop_list = table.get_internal_columns(
                tbl_rowids, flat_intern_colnames, eager, nInput,
                unpack_scalars=True, keepwrap=True)
            # unpack_scalars=not
            # request_unpack)
            # print('depth(raw_prop_list) = %r' % (ut.depth_profile(raw_prop_list),))

            prop_listT = list(zip(*raw_prop_list))
            for extern_colx, read_func in extern_resolve_colxs:
                data_list = []
                for uri in prop_listT[extern_colx]:
                    try:
                        # FIXME: only do this for a localpath
                        uri1 = ut.unixjoin(table.depc.cache_dpath, uri)
                        data = read_func(uri1)
                    except Exception as ex:
                        ut.printex(ex, 'failed to load external data', iswarning=False)
                        raise
                        # FIXME
                        #data = None
                    data_list.append(data)
                prop_listT[extern_colx] = data_list

            nested_proplistT = ut.list_unflat_take(prop_listT, nesting_xs)

            for tx in ut.where([isinstance(xs, list) for xs in nesting_xs]):
                nested_proplistT[tx] = list(zip(*nested_proplistT[tx]))

            prop_list = list(zip(*nested_proplistT))

            if request_unpack:
                prop_list = [None if p is None else p[0] for p in prop_list]
        except Exception as ex:
            ut.printex(ex, 'failed in get col', keys=[
                'table.tablename',
                'request_unpack',
                'tbl_rowids',
                'colnames',
                'raw_prop_list',
                (ut.depth_profile, 'raw_prop_list'),
                'prop_listT',
                (ut.depth_profile, 'prop_listT'),
                'nesting_xs',
                'nested_proplistT',
                'prop_list'])
            raise
        return prop_list
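
The transposes in get_col lean on the zip(*rows) idiom to flip between row-major and column-major views of the SQL results; in isolation:

rows = [(1, 'a'), (2, 'b'), (3, 'c')]
cols = list(zip(*rows))   # [(1, 2, 3), ('a', 'b', 'c')]
back = list(zip(*cols))   # [(1, 'a'), (2, 'b'), (3, 'c')]
assert back == rows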
Code Example #34
0
File: harness.py Project: Erotemic/ibeis
def make_single_testres(ibs, qaids, daids, pipecfg_list, cfgx2_lbl,
                        cfgdict_list, lbl, testnameid, use_cache=None,
                        subindexer_partial=ut.ProgressIter):
    """
    CommandLine:
        python -m ibeis.expt.harness --exec-run_test_configurations2
    """
    cfgslice = None
    if cfgslice is not None:
        pipecfg_list = pipecfg_list[cfgslice]

    dbname = ibs.get_dbname()

    if ut.NOT_QUIET:
        print('[harn] Make single testres')

    cfgx2_qreq_ = [
        ibs.new_query_request(qaids, daids, verbose=False, query_cfg=pipe_cfg)
        for pipe_cfg in ut.ProgressIter(pipecfg_list, lbl='Building qreq_',
                                        enabled=False)
    ]

    if use_cache is None:
        use_cache = USE_BIG_TEST_CACHE

    if use_cache:
        get_big_test_cache_info(ibs, cfgx2_qreq_)
        try:
            cachetup = get_big_test_cache_info(ibs, cfgx2_qreq_)
            testres = ut.load_cache(*cachetup)
            testres.cfgdict_list = cfgdict_list
            testres.cfgx2_lbl = cfgx2_lbl  # hack override
        except IOError:
            pass
        else:
            if ut.NOT_QUIET:
                ut.colorprint('[harn] single testres cache hit... returning', 'turquoise')
            return testres

    if ibs.table_cache:
        # HACK
        prev_feat_cfgstr = None

    cfgx2_cfgresinfo = []
    #nPipeCfg = len(pipecfg_list)
    cfgiter = subindexer_partial(range(len(cfgx2_qreq_)),
                                 lbl='query config',
                                 freq=1, adjust=False,
                                 separate=True)
    # Run each pipeline configuration
    for cfgx in cfgiter:
        qreq_ = cfgx2_qreq_[cfgx]

        ut.colorprint('testnameid=%r' % (
            testnameid,), 'green')
        ut.colorprint('annot_cfgstr = %s' % (
            qreq_.get_cfgstr(with_input=True, with_pipe=False),), 'yellow')
        ut.colorprint('pipe_cfgstr= %s' % (
            qreq_.get_cfgstr(with_data=False),), 'turquoise')
        ut.colorprint('pipe_hashstr = %s' % (
            qreq_.get_pipe_hashid(),), 'teal')
        if DRY_RUN:
            continue

        indent_prefix = '[%s cfg %d/%d]' % (
            dbname,
            # cfgiter.count (doesnt work when quiet)
            (cfgiter.parent_index * cfgiter.nTotal) + cfgx ,
            cfgiter.nTotal * cfgiter.parent_nTotal
        )

        with ut.Indenter(indent_prefix):
            # Run the test / read cache
            _need_compute = True
            if use_cache:
                # smaller cache for individual configuration runs
                st_cfgstr = qreq_.get_cfgstr(with_input=True)
                bt_cachedir = cachetup[0]
                st_cachedir = ut.unixjoin(bt_cachedir, 'small_tests')
                st_cachename = 'smalltest'
                ut.ensuredir(st_cachedir)
                try:
                    cfgres_info = ut.load_cache(st_cachedir, st_cachename, st_cfgstr)
                except IOError:
                    _need_compute = True
                else:
                    _need_compute = False
            if _need_compute:
                assert not ibs.table_cache
                if ibs.table_cache:
                    if (prev_feat_cfgstr is not None and
                            prev_feat_cfgstr != qreq_.qparams.feat_cfgstr):
                        # Clear features to preserve memory
                        ibs.clear_table_cache()
                        #qreq_.ibs.print_cachestats_str()
                cfgres_info = get_query_result_info(qreq_)
                # record previous feature configuration
                if ibs.table_cache:
                    prev_feat_cfgstr = qreq_.qparams.feat_cfgstr
                if use_cache:
                    ut.save_cache(st_cachedir, st_cachename, st_cfgstr, cfgres_info)
        if not NOMEMORY:
            # Store the results
            cfgx2_cfgresinfo.append(cfgres_info)
        else:
            cfgx2_qreq_[cfgx] = None
    if ut.NOT_QUIET:
        ut.colorprint('[harn] Completed running test configurations', 'white')
    if DRY_RUN:
        print('ran tests dryrun mode.')
        return
    if NOMEMORY:
        print('ran tests in memory savings mode. Cannot Print. exiting')
        return
    # Store all pipeline config results in a test result object
    testres = test_result.TestResult(pipecfg_list, cfgx2_lbl, cfgx2_cfgresinfo, cfgx2_qreq_)
    testres.testnameid = testnameid
    testres.lbl = lbl
    testres.cfgdict_list = cfgdict_list
    testres.aidcfg = None
    if use_cache:
        try:
            ut.save_cache(*tuple(list(cachetup) + [testres]))
        except Exception as ex:
            ut.printex(ex, 'error saving testres cache', iswarning=True)
            if ut.SUPER_STRICT:
                raise
    return testres
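
The small-test caching above follows a load-or-compute pattern; reduced to its core it looks like the sketch below (ut.load_cache/ut.save_cache signatures are assumed from their usage in this function):

import utool as ut

def load_or_compute(cachedir, name, cfgstr, compute_fn):
    # try the cache first; compute and save on a miss
    try:
        value = ut.load_cache(cachedir, name, cfgstr)
    except IOError:
        value = compute_fn()
        ut.save_cache(cachedir, name, cfgstr, value)
    return value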
Code Example #35
0
File: harness.py Project: whaozl/ibeis
def make_single_testres(ibs,
                        qaids,
                        daids,
                        pipecfg_list,
                        cfgx2_lbl,
                        cfgdict_list,
                        lbl,
                        testnameid,
                        use_cache=None,
                        subindexer_partial=ut.ProgressIter):
    """
    CommandLine:
        python -m ibeis.expt.harness --exec-run_test_configurations2
    """
    cfgslice = None
    if cfgslice is not None:
        pipecfg_list = pipecfg_list[cfgslice]

    dbname = ibs.get_dbname()

    if ut.NOT_QUIET:
        print('[harn] Make single testres')

    cfgx2_qreq_ = [
        ibs.new_query_request(qaids, daids, verbose=False, query_cfg=pipe_cfg)
        for pipe_cfg in ut.ProgressIter(
            pipecfg_list, lbl='Building qreq_', enabled=False)
    ]

    if use_cache is None:
        use_cache = USE_BIG_TEST_CACHE

    if use_cache:
        get_big_test_cache_info(ibs, cfgx2_qreq_)
        try:
            cachetup = get_big_test_cache_info(ibs, cfgx2_qreq_)
            testres = ut.load_cache(*cachetup)
            testres.cfgdict_list = cfgdict_list
            testres.cfgx2_lbl = cfgx2_lbl  # hack override
        except IOError:
            pass
        else:
            if ut.NOT_QUIET:
                ut.colorprint('[harn] single testres cache hit... returning',
                              'turquoise')
            return testres

    if ibs.table_cache:
        # HACK
        prev_feat_cfgstr = None

    cfgx2_cfgresinfo = []
    #nPipeCfg = len(pipecfg_list)
    cfgiter = subindexer_partial(range(len(cfgx2_qreq_)),
                                 lbl='query config',
                                 freq=1,
                                 adjust=False,
                                 separate=True)
    # Run each pipeline configuration
    for cfgx in cfgiter:
        qreq_ = cfgx2_qreq_[cfgx]

        ut.colorprint('testnameid=%r' % (testnameid, ), 'green')
        ut.colorprint(
            'annot_cfgstr = %s' %
            (qreq_.get_cfgstr(with_input=True, with_pipe=False), ), 'yellow')
        ut.colorprint(
            'pipe_cfgstr= %s' % (qreq_.get_cfgstr(with_data=False), ),
            'turquoise')
        ut.colorprint('pipe_hashstr = %s' % (qreq_.get_pipe_hashid(), ),
                      'teal')
        if DRY_RUN:
            continue

        indent_prefix = '[%s cfg %d/%d]' % (
            dbname,
            # cfgiter.count (doesnt work when quiet)
            (cfgiter.parent_index * cfgiter.nTotal) + cfgx,
            cfgiter.nTotal * cfgiter.parent_nTotal)

        with ut.Indenter(indent_prefix):
            # Run the test / read cache
            _need_compute = True
            if use_cache:
                # smaller cache for individual configuration runs
                st_cfgstr = qreq_.get_cfgstr(with_input=True)
                bt_cachedir = cachetup[0]
                st_cachedir = ut.unixjoin(bt_cachedir, 'small_tests')
                st_cachename = 'smalltest'
                ut.ensuredir(st_cachedir)
                try:
                    cfgres_info = ut.load_cache(st_cachedir, st_cachename,
                                                st_cfgstr)
                except IOError:
                    _need_compute = True
                else:
                    _need_compute = False
            if _need_compute:
                assert not ibs.table_cache
                if ibs.table_cache:
                    if (prev_feat_cfgstr is not None and
                            prev_feat_cfgstr != qreq_.qparams.feat_cfgstr):
                        # Clear features to preserve memory
                        ibs.clear_table_cache()
                        #qreq_.ibs.print_cachestats_str()
                cfgres_info = get_query_result_info(qreq_)
                # record previous feature configuration
                if ibs.table_cache:
                    prev_feat_cfgstr = qreq_.qparams.feat_cfgstr
                if use_cache:
                    ut.save_cache(st_cachedir, st_cachename, st_cfgstr,
                                  cfgres_info)
        if not NOMEMORY:
            # Store the results
            cfgx2_cfgresinfo.append(cfgres_info)
        else:
            cfgx2_qreq_[cfgx] = None
    if ut.NOT_QUIET:
        ut.colorprint('[harn] Completed running test configurations', 'white')
    if DRY_RUN:
        print('ran tests dryrun mode.')
        return
    if NOMEMORY:
        print('ran tests in memory savings mode. Cannot Print. exiting')
        return
    # Store all pipeline config results in a test result object
    testres = test_result.TestResult(pipecfg_list, cfgx2_lbl, cfgx2_cfgresinfo,
                                     cfgx2_qreq_)
    testres.testnameid = testnameid
    testres.lbl = lbl
    testres.cfgdict_list = cfgdict_list
    testres.aidcfg = None
    if use_cache:
        try:
            ut.save_cache(*tuple(list(cachetup) + [testres]))
        except Exception as ex:
            ut.printex(ex, 'error saving testres cache', iswarning=True)
            if ut.SUPER_STRICT:
                raise
    return testres
Code Example #36
0
File: IBEISControl.py Project: Erotemic/ibeis
 def get_neighbor_cachedir(ibs):
     neighbor_cachedir = ut.unixjoin(ibs.get_cachedir(), 'neighborcache2')
     return neighbor_cachedir
Code Example #37
0
def netrun():
    r"""
    CommandLine:
        # --- UTILITY
        python -m ibeis_cnn --tf get_juction_dpath --show

        # --- DATASET BUILDING ---
        # Build Dataset Aliases
        python -m ibeis_cnn --tf netrun --db PZ_MTEST --acfg ctrl --ensuredata --show
        python -m ibeis_cnn --tf netrun --db PZ_Master1 --acfg timectrl --ensuredata
        python -m ibeis_cnn --tf netrun --db PZ_Master1 --acfg timectrl:pername=None --ensuredata
        python -m ibeis_cnn --tf netrun --db PZ_Master1 --acfg timectrl:pername=None --ensuredata
        python -m ibeis_cnn --tf netrun --db mnist --ensuredata --show
        python -m ibeis_cnn --tf netrun --db mnist --ensuredata --show --datatype=category
        python -m ibeis_cnn --tf netrun --db mnist --ensuredata --show --datatype=siam-patch

        python -m ibeis_cnn --tf netrun --db PZ_Master1 --acfg ctrl:pername=None,excluderef=False,contributor_contains=FlankHack --ensuredata --show --datatype=siam-part

        # Parts based datasets
        python -m ibeis_cnn --tf netrun --db PZ_MTEST --acfg ctrl --datatype=siam-part --ensuredata --show

        # Patch based dataset (big one)
        python -m ibeis_cnn --tf netrun --db PZ_Master1 --acfg default:is_known=True,qmin_pername=2,view=primary,species=primary,minqual=ok --ensuredata --show --vtd
        python -m ibeis_cnn --tf netrun --ds pzm4 --weights=new --arch=siaml2_128 --train --monitor
        python -m ibeis_cnn --tf netrun --ds pzm4 --arch=siaml2_128 --test
        python -m ibeis_cnn --tf netrun --ds pzm4 --arch=siaml2_128 --veryverbose --no-flask

        # --- TRAINING ---
        python -m ibeis_cnn --tf netrun --db PZ_Master1 --acfg default:is_known=True,qmin_pername=2,view=primary,species=primary,minqual=ok --weights=new --arch=siaml2_128 --train --monitor

        python -m ibeis_cnn --tf netrun --ds timectrl_pzmaster1 --acfg ctrl:pername=None,excluderef=False,contributor_contains=FlankHack --train --weights=new --arch=siaml2_128  --monitor  # NOQA
        python -m ibeis_cnn --tf netrun --ds timectrl_pzmaster1 --acfg ctrl:pername=None,excluderef=False --train --weights=new --arch=siaml2_128  --monitor  # NOQA
        python -m ibeis_cnn --tf netrun --ds pzmtest --weights=new --arch=siaml2_128 --train --monitor --DEBUG_AUGMENTATION
        python -m ibeis_cnn --tf netrun --ds pzmtest --weights=new --arch=siaml2_128 --train --monitor

        python -m ibeis_cnn --tf netrun --ds flankhack --weights=new --arch=siaml2_partmatch --train --monitor --learning_rate=.00001
        python -m ibeis_cnn --tf netrun --ds flankhack --weights=new --arch=siam_deepfaceish --train --monitor --learning_rate=.00001

        # Different ways to train mnist
        python -m ibeis_cnn --tf netrun --db mnist --weights=new --arch=mnist_siaml2 --train --monitor --datatype=siam-patch
        python -m ibeis_cnn --tf netrun --db mnist --weights=new --arch=mnist-category --train --monitor --datatype=category

        # --- INITIALIZED-TRAINING ---
        python -m ibeis_cnn --tf netrun --ds pzmtest --arch=siaml2_128 --weights=gz-gray:current --train --monitor

        # --- TESTING ---
        python -m ibeis_cnn --tf netrun --db liberty --weights=liberty:current --arch=siaml2_128 --test
        python -m ibeis_cnn --tf netrun --db PZ_Master0 --weights=combo:current --arch=siaml2_128 --testall

    Example:
        >>> # DISABLE_DOCTEST
        >>> from ibeis_cnn.netrun import *  # NOQA
        >>> netrun()
        >>> ut.show_if_requested()
    """
    ut.colorprint('[netrun] NET RUN', 'red')

    requests, hyperparams, tags = parse_args()
    ds_tag         = tags['ds_tag']
    datatype       = tags['datatype']
    extern_ds_tag  = tags['extern_ds_tag']
    arch_tag       = tags['arch_tag']
    checkpoint_tag = tags['checkpoint_tag']

    # ----------------------------
    # Choose the main dataset
    ut.colorprint('[netrun] Ensuring Dataset', 'yellow')
    dataset = ingest_data.grab_dataset(ds_tag, datatype)
    if extern_ds_tag is not None:
        extern_dpath = ingest_data.get_extern_training_dpath(extern_ds_tag)
    else:
        extern_dpath = None

    print('dataset.training_dpath = %r' % (dataset.training_dpath,))

    print('Dataset Alias Key: %r' % (dataset.alias_key,))
    print('Current Dataset Tag: %r' % (
        ut.invert_dict(DS_TAG_ALIAS2).get(dataset.alias_key, None),))

    if requests['ensuredata']:
        # Print alias key that maps to this particular dataset
        if ut.show_was_requested():
            interact_ = dataset.interact()  # NOQA
            return
        print('...exiting')
        sys.exit(1)

    # ----------------------------
    # Choose model architecture
    # TODO: data will need to return info about number of labels in viewpoint models
    # Specify model architecture
    ut.colorprint('[netrun] Architecture Specification', 'yellow')
    if arch_tag == 'siam2stream':
        model = models.SiameseCenterSurroundModel(
            data_shape=dataset.data_shape,
            training_dpath=dataset.training_dpath, **hyperparams)
    elif arch_tag.startswith('siam'):
        model = models.SiameseL2(
            data_shape=dataset.data_shape,
            arch_tag=arch_tag,
            training_dpath=dataset.training_dpath, **hyperparams)
    elif arch_tag == 'mnist-category':
        model = models.MNISTModel(
            data_shape=dataset.data_shape,
            output_dims=dataset.output_dims,
            arch_tag=arch_tag,
            training_dpath=dataset.training_dpath, **hyperparams)
    else:
        raise ValueError('Unknown arch_tag=%r' % (arch_tag,))

    ut.colorprint('[netrun] Initialize architecture', 'yellow')
    model.init_arch()

    # ----------------------------
    # Choose weight initialization
    ut.colorprint('[netrun] Setting weights', 'yellow')
    if checkpoint_tag == 'new':
        ut.colorprint('[netrun] * Initializing new weights', 'lightgray')
        model.reinit_weights()
    else:
        checkpoint_tag = model.resolve_fuzzy_checkpoint_pattern(
            checkpoint_tag, extern_dpath)
        ut.colorprint('[netrun] * Resolving weights checkpoint_tag=%r' %
                      (checkpoint_tag,), 'lightgray')
        if extern_dpath is not None:
            model.load_extern_weights(dpath=extern_dpath,
                                      checkpoint_tag=checkpoint_tag)
        elif model.has_saved_state(checkpoint_tag=checkpoint_tag):
            model.load_model_state(checkpoint_tag=checkpoint_tag)
        else:
            model_state_fpath = model.get_model_state_fpath(
                checkpoint_tag=checkpoint_tag)
            print('model_state_fpath = %r' % (model_state_fpath,))
            ut.checkpath(model_state_fpath, verbose=True)
            print('Known checkpoints are: ' + ut.repr3(model.list_saved_checkpoints()))
            raise ValueError(('Unresolved weight init: '
                              'checkpoint_tag=%r, extern_ds_tag=%r') % (
                                  checkpoint_tag, extern_ds_tag,))

    #print('Model State:')
    #print(model.get_state_str())
    # ----------------------------
    if not model.is_train_state_initialized():
        ut.colorprint('[netrun] Need to initialize training state', 'yellow')
        X_train, y_train = dataset.subset('train')
        model.ensure_data_params(X_train, y_train)

    # Run Actions
    if requests['train']:
        ut.colorprint('[netrun] Training Requested', 'yellow')
        # parse training arguments
        config = ut.argparse_dict(dict(
            era_size=15,
            max_epochs=1200,
            rate_decay=.8,
        ))
        model.monitor_config.update(**config)
        X_train, y_train = dataset.subset('train')
        X_valid, y_valid = dataset.subset('valid')
        model.fit(X_train, y_train, X_valid=X_valid, y_valid=y_valid)

    elif requests['test']:
        #assert model.best_results['epoch'] is not None
        ut.colorprint('[netrun] Test Requested', 'yellow')
        if requests['testall']:
            ut.colorprint('[netrun]  * Testing on all data', 'lightgray')
            X_test, y_test = dataset.subset('all')
            flat_metadata = dataset.subset_metadata('all')
        else:
            ut.colorprint('[netrun]  * Testing on test subset', 'lightgray')
            X_test, y_test = dataset.subset('test')
            flat_metadata = dataset.subset_metadata('test')
        data, labels = X_test, y_test
        dataname = dataset.alias_key
        experiments.test_siamese_performance(model, data, labels,
                                             flat_metadata, dataname)
    else:
        if not ut.get_argflag('--cmd'):
            raise ValueError('nothing here. need to train or test')

    if requests['publish']:
        ut.colorprint('[netrun] Publish Requested', 'yellow')
        publish_dpath = ut.truepath('~/Dropbox/IBEIS')
        published_model_state = ut.unixjoin(
            publish_dpath, model.arch_tag + '_model_state.pkl')
        ut.copy(model.get_model_state_fpath(), published_model_state)
        ut.view_directory(publish_dpath)
        print('You need to get the dropbox link and '
              'register it into the appropriate file')
        # pip install dropbox
        # https://www.dropbox.com/developers/core/start/python
        # import dropbox  # need oauth
        #client.share('/myfile.txt', short_url=False)
        # https://cthulhu.dyn.wildme.io/public/models/siaml2_128_model_state.pkl

    if ut.get_argflag('--cmd'):
        ut.embed()
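
The training branch reads hyperparameters via ut.argparse_dict. A rough stand-in, under the assumption that it overrides each default with a matching --key command-line flag (the real utool behavior may differ in details):

import argparse

def argparse_dict_sketch(defaults):
    parser = argparse.ArgumentParser()
    for key, val in defaults.items():
        parser.add_argument('--' + key, type=type(val), default=val)
    args, _ = parser.parse_known_args()
    return vars(args)

config = argparse_dict_sketch(dict(era_size=15, max_epochs=1200, rate_decay=.8))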
Code Example #38
0
File: IBEISControl.py Project: Erotemic/ibeis
 def get_match_thumbdir(ibs):
     match_thumb_dir = ut.unixjoin(ibs.get_cachedir(), 'match_thumbs')
     ut.ensuredir(match_thumb_dir)
     return match_thumb_dir
Code Example #39
0
File: makesetup.py Project: animalus/utool
def make_setup(repodir):
    # NOTE: this listing begins mid-module; `setup_text_fmt`, `timestamp`,
    # and the imports (os, utool, os.path.basename) are defined earlier in
    # makesetup.py and are not shown here.
    pkgname = basename(repodir)
    packages = utool.ls_moduledirs(repodir, full=False)
    print(pkgname)
    setup_text = setup_text_fmt.format(
        packages=packages,
        repodir=repodir,
        timestamp=timestamp,
        pkgname=pkgname,
    )
    return setup_text


if __name__ == '__main__':
    writeflag = utool.get_argflag(('--write', '-w'))
    overwriteflag = utool.get_argflag(('--yes', '-y'))
    repodir = utool.unixpath(os.getcwd())
    print('[utool] making setup.py for: %r' % repodir)
    setup_text = make_setup(repodir)
    if writeflag:
        setup_fpath = utool.unixjoin(repodir, 'setup.py')
        if utool.checkpath(setup_fpath):
            confirm_flag = overwriteflag
        else:
            confirm_flag = True
        if confirm_flag:
            utool.write_to(setup_fpath, setup_text)
        else:
            print('setup.py file exists not writing')
    else:
        print(setup_text)
Code Example #40
0
 def get_neighbor_cachedir(self):
     neighbor_cachedir = ut.unixjoin(self.get_cachedir(), 'neighborcache2')
     return neighbor_cachedir
Code Example #41
0
def merge_datasets(dataset_list):
    """
    Merges a list of dataset objects into a single combined dataset.
    """
    def consensus_check_factory():
        """
        Returns a temporary function used to check that all incoming values
        with the same key are consistent
        """
        from collections import defaultdict
        past_values = defaultdict(lambda: None)

        def consensus_check(value, key):
            assert past_values[key] is None or past_values[key] == value, (
                'key=%r with value=%r does not agree with past_value=%r' %
                (key, value, past_values[key]))
            past_values[key] = value
            return value

        return consensus_check

    total_num_labels = 0
    total_num_data = 0

    input_alias_list = [dataset.alias_key for dataset in dataset_list]

    alias_key = 'combo_' + ut.hashstr27(repr(input_alias_list), hashlen=8)
    training_dpath = ut.ensure_app_resource_dir('ibeis_cnn', 'training',
                                                alias_key)
    data_fpath = ut.unixjoin(training_dpath, alias_key + '_data.hdf5')
    labels_fpath = ut.unixjoin(training_dpath, alias_key + '_labels.hdf5')

    try:
        # Try and short circut cached loading
        merged_dataset = DataSet.from_alias_key(alias_key)
        return merged_dataset
    except (Exception, AssertionError) as ex:
        ut.printex(ex,
                   'alias definitions have changed. alias_key=%r' %
                   (alias_key, ),
                   iswarning=True)

    # Build the dataset
    consensus_check = consensus_check_factory()

    for dataset in dataset_list:
        print(ut.get_file_nBytes_str(dataset.data_fpath))
        print(dataset.data_fpath_dict['full'])
        print(dataset.num_labels)
        print(dataset.data_per_label)
        total_num_labels += dataset.num_labels
        total_num_data += (dataset.data_per_label * dataset.num_labels)
        # check that all data_dims agree
        data_shape = consensus_check(dataset.data_shape, 'data_shape')
        data_per_label = consensus_check(dataset.data_per_label,
                                         'data_per_label')

    # hack record this
    import numpy as np
    data_dtype = np.uint8
    label_dtype = np.int32
    data = np.empty((total_num_data, ) + data_shape, dtype=data_dtype)
    labels = np.empty(total_num_labels, dtype=label_dtype)

    #def iterable_assignment():
    #    pass
    data_left = 0
    data_right = None
    labels_left = 0
    labels_right = None
    for dataset in ut.ProgressIter(dataset_list,
                                   lbl='combining datasets',
                                   freq=1):
        X_all, y_all = dataset.subset('full')
        labels_right = labels_left + y_all.shape[0]
        data_right = data_left + X_all.shape[0]
        data[data_left:data_right] = X_all
        labels[labels_left:labels_right] = y_all
        data_left = data_right
        labels_left = labels_right

    ut.save_data(data_fpath, data)
    ut.save_data(labels_fpath, labels)

    labels = ut.load_data(labels_fpath)
    num_labels = len(labels)

    merged_dataset = DataSet.new_training_set(
        alias_key=alias_key,
        data_fpath=data_fpath,
        labels_fpath=labels_fpath,
        metadata_fpath=None,
        training_dpath=training_dpath,
        data_shape=data_shape,
        data_per_label=data_per_label,
        output_dims=1,
        num_labels=num_labels,
    )
    return merged_dataset
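
The merge loop above preallocates one output array and fills it with running offsets instead of repeatedly concatenating; the same pattern in miniature:

import numpy as np

chunks = [np.arange(3), np.arange(4), np.arange(2)]
total = sum(len(c) for c in chunks)
out = np.empty(total, dtype=int)
left = 0
for c in chunks:
    out[left:left + len(c)] = c
    left += len(c)
assert np.array_equal(out, np.concatenate(chunks))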
Code Example #42
0
def write_default_ipython_profile():
    """
    CommandLine:
        python ~/local/init/init_ipython_config.py

        python -c "import utool as ut; ut.vd(ut.unixpath('~/.ipython/profile_default'))"
        python -c "import utool as ut; ut.editfile(ut.unixpath('~/.ipython/profile_default/ipython_config.py'))"

    References:
        http://2sn.org/python/ipython_config.py
    """
    dpath = ut.unixpath('~/.ipython/profile_default')
    ut.ensuredir(dpath, info=True, verbose=True)
    ipy_config_fpath = ut.unixjoin(dpath, 'ipython_config.py')
    ipy_config_text = ut.codeblock(r'''
        # STARTBLOCK
        c = get_config()  # NOQA
        c.InteractiveShellApp.exec_lines = []
        import six
        if six.PY2:
            future_line = (
                'from __future__ import absolute_import, division, print_function, with_statement, unicode_literals')
            c.InteractiveShellApp.exec_lines.append(future_line)
            # Fix sip versions
            try:
                import sip
                # http://stackoverflow.com/questions/21217399/pyqt4-qtcore-qvariant-object-instead-of-a-string
                sip.setapi('QVariant', 2)
                sip.setapi('QString', 2)
                sip.setapi('QTextStream', 2)
                sip.setapi('QTime', 2)
                sip.setapi('QUrl', 2)
                sip.setapi('QDate', 2)
                sip.setapi('QDateTime', 2)
                if hasattr(sip, 'setdestroyonexit'):
                    sip.setdestroyonexit(False)  # This prevents a crash on windows
            except ImportError as ex:
                pass
            except ValueError as ex:
                print('Warning: Value Error: %s' % str(ex))
                pass
        c.InteractiveShellApp.exec_lines.append('%load_ext autoreload')
        c.InteractiveShellApp.exec_lines.append('%autoreload 2')
        #c.InteractiveShellApp.exec_lines.append('%pylab qt4')
        c.InteractiveShellApp.exec_lines.append('import numpy as np')
        c.InteractiveShellApp.exec_lines.append('import ubelt as ub')
        c.InteractiveShellApp.exec_lines.append('import utool as ut')
        c.InteractiveShellApp.exec_lines.append('import pandas as pd')
        c.InteractiveShellApp.exec_lines.append('pd.options.display.max_columns = 40')
        c.InteractiveShellApp.exec_lines.append('pd.options.display.width = 160')
        c.InteractiveShellApp.exec_lines.append('pd.options.display.max_rows = 20')
        c.InteractiveShellApp.exec_lines.append('pd.options.display.float_format = lambda x: \'%.4f\' % (x,)')
        c.InteractiveShellApp.exec_lines.append('import networkx as nx')
        #c.InteractiveShellApp.exec_lines.append('import plottool as pt')
        c.InteractiveShellApp.exec_lines.append('from os.path import *')
        c.InteractiveShellApp.exec_lines.append('from six.moves import cPickle as pickle')
        #c.InteractiveShellApp.exec_lines.append('if \'verbose\' not in vars():\\n    verbose = True')
        import utool as ut
        c.InteractiveShellApp.exec_lines.append(ut.codeblock(
            """
            class classproperty(property):
                def __get__(self, cls, owner):
                    return classmethod(self.fget).__get__(None, owner)()
            class vim(object):
                @classproperty
                def focus(cls):
                    import utool.util_ubuntu
                    utool.util_ubuntu.xctrl.do(('focus', 'GVIM'),)
                @classproperty
                def copy(cls):
                    import utool.util_ubuntu
                    utool.util_ubuntu.xctrl.do(('focus', 'GVIM'),)
                    import utool as ut
                    import IPython
                    ipy = IPython.get_ipython()
                    lastline = ipy.history_manager.input_hist_parsed[-2]
                    ut.copy_text_to_clipboard(lastline)
                    # import utool as ut
                    import utool.util_ubuntu
                    utool.util_ubuntu.xctrl.do(
                        ('focus', 'GVIM'),
                        ('key', 'ctrl+v'),
                        ('focus', 'x-terminal-emulator.X-terminal-emulator')
                    )
            """
        ))
        #c.InteractiveShell.autoindent = True
        #c.InteractiveShell.colors = 'LightBG'
        #c.InteractiveShell.confirm_exit = False
        #c.InteractiveShell.deep_reload = True
        c.InteractiveShell.editor = 'gvim'
        #c.InteractiveShell.xmode = 'Context'
        # ENDBLOCK
        ''')
    ut.write_to(ipy_config_fpath, ipy_config_text)
Code Example #45
0
File: old_chip_preproc.py Project: Erotemic/ibeis
def make_annot_chip_fpath_list(ibs, aid_list, config2_=None):
    chipdir = ibs.get_chipdir()
    chip_uri_list = make_annot_chip_uri_list(ibs, aid_list, config2_=config2_)
    cfpath_list = [ut.unixjoin(chipdir, chip_uri) for chip_uri in chip_uri_list]
    return cfpath_list