Example #1
def cached_impaint(
    bgr_img,
    cached_mask_fpath=None,
    label_colors=None,
    init_mask=None,
    aug=False,
    refine=False,
):
    import utool as ut  # ut is used below for hashstr_arr / checkpath
    import vtool as vt

    if cached_mask_fpath is None:
        cached_mask_fpath = 'image_' + ut.hashstr_arr(bgr_img) + '.png'
    if aug:
        cached_mask_fpath += '.' + ut.hashstr_arr(bgr_img)
        if label_colors is not None:
            cached_mask_fpath += ut.hashstr_arr(label_colors)
        cached_mask_fpath += '.png'
    # cached_mask_fpath = 'tmp_mask.png'
    if refine or not ut.checkpath(cached_mask_fpath):
        if refine and ut.checkpath(cached_mask_fpath):
            if init_mask is None:
                init_mask = vt.imread(cached_mask_fpath, grayscale=True)
        custom_mask = impaint_mask(bgr_img,
                                   label_colors=label_colors,
                                   init_mask=init_mask)
        vt.imwrite(cached_mask_fpath, custom_mask)
    else:
        custom_mask = vt.imread(cached_mask_fpath, grayscale=True)
    return custom_mask
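
Example #1 keys a file cache on the contents of the input image: hashstr_arr(bgr_img) is embedded in the mask filename, so the painted mask is recomputed only when the image (or the augmentation inputs) change. Below is a minimal sketch of that pattern for an arbitrary expensive computation; it assumes only the ut.hashstr_arr / ut.checkpath calls shown above, while compute_fn and the .npy cache layout are hypothetical.

from os.path import join

import numpy as np
import utool as ut


def cached_compute(arr, compute_fn, cache_dir='.'):
    # The cache key is derived from the array contents, so a changed input
    # can never pick up a stale result.
    fpath = join(cache_dir, 'result_' + ut.hashstr_arr(arr) + '.npy')
    if ut.checkpath(fpath):
        return np.load(fpath)
    result = compute_fn(arr)
    np.save(fpath, result)
    return result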
Example #2
def new_word_index(aid_list=[],
                   vecs_list=[],
                   flann_params={},
                   flann_cachedir=None,
                   indexer_cfgstr='',
                   hash_rowids=True,
                   use_cache=not NOCACHE_WORD,
                   use_params_hash=True):
    print('[windex] building WordIndex object')
    _check_input(aid_list, vecs_list)
    # Create indexes into the input aids
    ax_list = np.arange(len(aid_list))
    idx2_vec, idx2_ax, idx2_fx = invert_index(vecs_list, ax_list)
    if hash_rowids:
        # Fingerprint
        aids_hashstr = utool.hashstr_arr(aid_list, '_AIDS')
        cfgstr = aids_hashstr + indexer_cfgstr
    else:
        # Don't hash rowids when given enough info in indexer_cfgstr
        cfgstr = indexer_cfgstr
    # Build/Load the flann index
    flann = nntool.flann_cache(
        idx2_vec, **{
            'cache_dir': flann_cachedir,
            'cfgstr': cfgstr,
            'flann_params': flann_params,
            'use_cache': use_cache,
            'use_params_hash': use_params_hash
        })
    ax2_aid = np.array(aid_list)
    windex = WordIndex(ax2_aid, idx2_vec, idx2_ax, idx2_fx, flann)
    return windex
Example #3
def load_qcx2_res(ibs, qrid_list, nocache=False):
    'Performs / loads all queries'
    qreq = mc3.quickly_ensure_qreq(ibs, qrids=qrid_list)
    # Build query big cache rowid
    query_rowid = qreq.get_rowid()
    hs_rowid    = ibs.get_db_name()
    qcxs_rowid  = utool.hashstr_arr(qrid_list, lbl='_qcxs')
    qres_rowid  = hs_rowid + query_rowid + qcxs_rowid
    cache_dir = join(ibs.dirs.cache_dir, 'query_results_bigcache')
    print('[rr2] load_qcx2_res(): %r' % qres_rowid)
    io_kwargs = dict(dpath=cache_dir, fname='query_results', rowid=qres_rowid, ext='.cPkl')
    # Return cache if available
    if not params.args.nocache_query and (not nocache):
        qrid2_qres = io.smart_load(**io_kwargs)
        if qrid2_qres is not None:
            print('[rr2]  *  cache hit')
            return qrid2_qres
        print('[rr2]  *  cache miss')
    else:
        print('[rr2]  *  cache off')
    # Individually load / compute queries
    if isinstance(qrid_list, list):
        qcx_set = set(qrid_list)
    else:
        qcx_set = set(qrid_list.tolist())
    qcx_max = max(qrid_list) + 1
    qrid2_qres = [ibs.query(qrid) if qrid in qcx_set else None for qrid in xrange(qcx_max)]
    # Save to the cache
    print('[rr2] Saving query_results to bigcache: %r' % qres_rowid)
    utool.ensuredir(cache_dir)
    io.smart_save(qrid2_qres, **io_kwargs)
    return qrid2_qres
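
Example #3 builds its big-cache key by concatenating three pieces: the database name, the query-request rowid, and a hash of the requested query ids (note that hashstr_arr is called with the lbl keyword here rather than a positional label). A toy sketch of composing such a key follows; it assumes only the lbl keyword shown above, and the surrounding names and values are made up.

import utool


def make_bigcache_key(db_name, query_cfgstr, qrid_list):
    # Each component narrows the cache entry: which database, which query
    # configuration, and exactly which query ids were requested.
    qcxs_key = utool.hashstr_arr(qrid_list, lbl='_qcxs')
    return db_name + query_cfgstr + qcxs_key


print(make_bigcache_key('testdb1', '_QREQ(cfg)', [1, 2, 3]))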
Example #4
def new_word_index(aid_list=[], vecs_list=[], flann_params={},
                       flann_cachedir=None, indexer_cfgstr='', hash_rowids=True,
                       use_cache=not NOCACHE_WORD, use_params_hash=True):
    print('[windex] building WordIndex object')
    _check_input(aid_list, vecs_list)
    # Create indexes into the input aids
    ax_list = np.arange(len(aid_list))
    idx2_vec, idx2_ax, idx2_fx = invert_index(vecs_list, ax_list)
    if hash_rowids:
        # Fingerprint
        aids_hashstr = utool.hashstr_arr(aid_list, '_AIDS')
        cfgstr = aids_hashstr + indexer_cfgstr
    else:
        # Don't hash rowids when given enough info in indexer_cfgstr
        cfgstr = indexer_cfgstr
    # Build/Load the flann index
    flann = nntool.flann_cache(idx2_vec, **{
        'cache_dir': flann_cachedir,
        'cfgstr': cfgstr,
        'flann_params': flann_params,
        'use_cache': use_cache,
        'use_params_hash': use_params_hash})
    ax2_aid = np.array(aid_list)
    windex = WordIndex(ax2_aid, idx2_vec, idx2_ax, idx2_fx, flann)
    return windex
Example #5
    def get_cfgstr(nnindexer, noquery=False):
        r""" returns string which uniquely identified configuration and support data

        Args:
            noquery (bool): if True cfgstr is only relevant to building the
                index. No search params are returned (default = False)

        Returns:
            str: flann_cfgstr

        CommandLine:
            python -m wbia.algo.hots.neighbor_index --test-get_cfgstr

        Example:
            >>> # DISABLE_DOCTEST
            >>> from wbia.algo.hots.neighbor_index import *  # NOQA
            >>> import wbia
            >>> cfgdict = dict(fg_on=False)
            >>> qreq_ = wbia.testdata_qreq_(defaultdb='testdb1', p='default:fg_on=False')
            >>> qreq_.load_indexer()
            >>> nnindexer = qreq_.indexer
            >>> noquery = True
            >>> flann_cfgstr = nnindexer.get_cfgstr(noquery)
            >>> result = ('flann_cfgstr = %s' % (str(flann_cfgstr),))
            >>> print(result)
            flann_cfgstr = _FLANN((algo=kdtree,seed=42,t=8,))_VECS((11260,128)gj5nea@ni0%f3aja)
        """
        flann_cfgstr_list = []
        use_params_hash = True
        use_data_hash = True
        if use_params_hash:
            flann_defaults = vt.get_flann_params(
                nnindexer.flann_params['algorithm'])
            # flann_params_clean = flann_defaults.copy()
            flann_params_clean = ut.sort_dict(flann_defaults)
            ut.update_existing(flann_params_clean, nnindexer.flann_params)
            if noquery:
                ut.delete_dict_keys(flann_params_clean, ['checks'])
            shortnames = dict(algorithm='algo',
                              checks='chks',
                              random_seed='seed',
                              trees='t')
            short_params = ut.odict([
                (shortnames.get(key, key), str(val)[0:7])
                for key, val in six.iteritems(flann_params_clean)
            ])
            flann_valsig_ = ut.repr2(short_params,
                                     nl=False,
                                     explicit=True,
                                     strvals=True)
            flann_valsig_ = flann_valsig_.lstrip('dict').replace(' ', '')
            # flann_valsig_ = str(list(flann_params.values()))
            # flann_valsig = ut.remove_chars(flann_valsig_, ', \'[]')
            flann_cfgstr_list.append('_FLANN(' + flann_valsig_ + ')')
        if use_data_hash:
            vecs_hashstr = ut.hashstr_arr(nnindexer.idx2_vec, '_VECS')
            flann_cfgstr_list.append(vecs_hashstr)
        flann_cfgstr = ''.join(flann_cfgstr_list)
        return flann_cfgstr
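
The _VECS((11260,128)gj5nea@ni0%f3aja) suffix in the doctest above suggests that hashstr_arr embeds the label, the array shape, and a short digest of the contents, so the cfgstr changes whenever the indexed descriptors do. A small illustration under that assumption; the toy arrays below are made up.

import numpy as np
import utool as ut

vecs_a = np.zeros((10, 128), dtype=np.uint8)
vecs_b = np.ones((10, 128), dtype=np.uint8)
# Same shape, different contents: the two digests should differ, so a cfgstr
# built from either one refers to exactly one set of descriptors.
print(ut.hashstr_arr(vecs_a, '_VECS'))
print(ut.hashstr_arr(vecs_b, '_VECS'))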
Example #6
def make_new_dbpath(ibs, id_label, id_list):
    """
    Creates a new database path unique to the exported subset of ids.
    """
    import ibeis
    tag_hash = ut.hashstr_arr(id_list, hashlen=8, alphabet=ut.ALPHABET_27)
    base_fmtstr = ibs.get_dbname() + '_' + id_label + 's=' + \
        tag_hash.replace('(', '_').replace(')', '_') + '_%d'
    dpath = ibeis.get_workdir()
    new_dbpath = ut.non_existing_path(base_fmtstr, dpath)
    return new_dbpath
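
Example #6 is the only call here that passes the optional keywords: hashlen=8 shortens the digest and alphabet=ut.ALPHABET_27 restricts which characters it may contain, and the surrounding parentheses are still scrubbed before the hash lands in a directory name. A short sketch of the same idea, assuming those keywords behave as the call above implies; the subset_ids list is made up.

import utool as ut

subset_ids = [3, 1, 4, 1, 5, 9, 2, 6]
tag = ut.hashstr_arr(subset_ids, hashlen=8, alphabet=ut.ALPHABET_27)
# Scrub characters that are awkward in directory names, as make_new_dbpath does.
safe_tag = tag.replace('(', '_').replace(')', '_')
print('exported_aids=' + safe_tag)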
Example #7
    def get_cfgstr(nnindexer, noquery=False):
        r""" returns string which uniquely identified configuration and support data

        Args:
            noquery (bool): if True cfgstr is only relevant to building the
                index. No search params are returned (default = False)

        Returns:
            str: flann_cfgstr

        CommandLine:
            python -m ibeis.algo.hots.neighbor_index --test-get_cfgstr

        Example:
            >>> # DISABLE_DOCTEST
            >>> from ibeis.algo.hots.neighbor_index import *  # NOQA
            >>> import ibeis
            >>> cfgdict = dict(fg_on=False)
            >>> qreq_ = ibeis.testdata_qreq_(defaultdb='testdb1', p='default:fg_on=False')
            >>> qreq_.load_indexer()
            >>> nnindexer = qreq_.indexer
            >>> noquery = True
            >>> flann_cfgstr = nnindexer.get_cfgstr(noquery)
            >>> result = ('flann_cfgstr = %s' % (str(flann_cfgstr),))
            >>> print(result)
            flann_cfgstr = _FLANN((algo=kdtree,seed=42,t=8,))_VECS((11260,128)gj5nea@ni0%f3aja)
        """
        flann_cfgstr_list = []
        use_params_hash = True
        use_data_hash = True
        if use_params_hash:
            flann_defaults = vt.get_flann_params(nnindexer.flann_params['algorithm'])
            #flann_params_clean = flann_defaults.copy()
            flann_params_clean = ut.sort_dict(flann_defaults)
            ut.updateif_haskey(flann_params_clean, nnindexer.flann_params)
            if noquery:
                ut.delete_dict_keys(flann_params_clean, ['checks'])
            shortnames = dict(algorithm='algo', checks='chks', random_seed='seed', trees='t')
            short_params = ut.odict([(shortnames.get(key, key), str(val)[0:7])
                                     for key, val in six.iteritems(flann_params_clean)])
            flann_valsig_ = ut.dict_str(
                short_params, nl=False, explicit=True, strvals=True)
            flann_valsig_ = flann_valsig_.lstrip('dict').replace(' ', '')
            #flann_valsig_ = str(list(flann_params.values()))
            #flann_valsig = ut.remove_chars(flann_valsig_, ', \'[]')
            flann_cfgstr_list.append('_FLANN(' + flann_valsig_ + ')')
        if use_data_hash:
            vecs_hashstr = ut.hashstr_arr(nnindexer.idx2_vec, '_VECS')
            flann_cfgstr_list.append(vecs_hashstr)
        flann_cfgstr = ''.join(flann_cfgstr_list)
        return flann_cfgstr
Example #8
def get_indexed_cfgstr(ibs, aid_list):
    """
    Creates a config string for the input into the nearest neighbors index.
    It is based on the features that were computed for it and the indexes
    of the input annotations.

    TODO: We should probably use the Annotation UUIDS rather than the ROWIDs
    to compute this configstr

    """
    feat_cfgstr = ibs.cfg.feat_cfg.get_cfgstr()
    # returns something like: _daids((6)qbm6uaegu7gv!ut!)_FEAT(params)
    daid_cfgstr = utool.hashstr_arr(aid_list, 'daids')  # todo change to uuids
    new_cfgstr = '_' + daid_cfgstr + feat_cfgstr
    return new_cfgstr
Example #9
def get_cfgstr_list(vocabtrain_cfg, **kwargs):
    if vocabtrain_cfg.override_vocab == 'default':
        if isinstance(vocabtrain_cfg.vocab_taids, six.string_types):
            taids_cfgstr = 'taids=%s' % vocabtrain_cfg.vocab_taids
        else:
            taids_cfgstr = ut.hashstr_arr(vocabtrain_cfg.vocab_taids,
                                          'taids', hashlen=8)
        vocabtrain_cfg_list = [
            '_VocabTrain(',
            'nWords=%d' % (vocabtrain_cfg.nWords,),
            ',init=', str(vocabtrain_cfg.vocab_init_method),
            ',nIters=%d,' % int(vocabtrain_cfg.vocab_nIters),
            taids_cfgstr,
            ')',
        ]
    else:
        vocabtrain_cfg_list = ['_VocabTrain(override=%s)' %
                               (vocabtrain_cfg.override_vocab,)]
    return vocabtrain_cfg_list
Example #10
def make_incremental_test_database(ibs_gt, aid_list1, reset):
    """
    Makes a test database. Adds images and annotations but does not transfer
    names. If reset is True, the new database is guaranteed to be built from
    a fresh start.

    Args:
        ibs_gt    (IBEISController):
        aid_list1 (list):
        reset     (bool): if True the test database is completely rebuilt

    Returns:
        IBEISController: ibs2
    """
    import ibeis
    print('make_incremental_test_database. reset=%r' % (reset, ))
    aids1_hashid = ut.hashstr_arr(aid_list1)
    prefix = '_INCTEST_' + aids1_hashid + '_'
    dbname2 = prefix + ibs_gt.get_dbname()
    ibs2 = ibeis.opendb(dbname2,
                        allow_newdir=True,
                        delete_ibsdir=reset,
                        use_cache=False)
    # reset if flag specified or no data in ibs2
    if reset or len(ibs2.get_valid_gids()) == 0:
        assert len(ibs2.get_valid_aids()) == 0
        assert len(ibs2.get_valid_gids()) == 0
        assert len(ibs2.get_valid_nids()) == 0
        # Get annotations and their images from database 1
        gid_list1 = ibs_gt.get_annot_gids(aid_list1)
        gpath_list1 = ibs_gt.get_image_paths(gid_list1)
        # Add all images from database 1 to database 2
        gid_list2 = ibs2.add_images(gpath_list1, auto_localize=False)
        # Image UUIDS should be consistent between databases
        image_uuid_list1 = ibs_gt.get_image_uuids(gid_list1)
        image_uuid_list2 = ibs2.get_image_uuids(gid_list2)
        assert image_uuid_list1 == image_uuid_list2
        ut.assert_lists_eq(image_uuid_list1, image_uuid_list2)
    return ibs2
Example #11
def load_qcx2_res(ibs, qrid_list, nocache=False):
    'Performs / loads all queries'
    qreq = mc3.quickly_ensure_qreq(ibs, qrids=qrid_list)
    # Build query big cache rowid
    query_rowid = qreq.get_rowid()
    hs_rowid = ibs.get_db_name()
    qcxs_rowid = utool.hashstr_arr(qrid_list, lbl='_qcxs')
    qres_rowid = hs_rowid + query_rowid + qcxs_rowid
    cache_dir = join(ibs.dirs.cache_dir, 'query_results_bigcache')
    print('[rr2] load_qcx2_res(): %r' % qres_rowid)
    io_kwargs = dict(dpath=cache_dir,
                     fname='query_results',
                     rowid=qres_rowid,
                     ext='.cPkl')
    # Return cache if available
    if not params.args.nocache_query and (not nocache):
        qrid2_qres = io.smart_load(**io_kwargs)
        if qrid2_qres is not None:
            print('[rr2]  *  cache hit')
            return qrid2_qres
        print('[rr2]  *  cache miss')
    else:
        print('[rr2]  *  cache off')
    # Individually load / compute queries
    if isinstance(qrid_list, list):
        qcx_set = set(qrid_list)
    else:
        qcx_set = set(qrid_list.tolist())
    qcx_max = max(qrid_list) + 1
    qrid2_qres = [
        ibs.query(qrid) if qrid in qcx_set else None
        for qrid in xrange(qcx_max)
    ]
    # Save to the cache
    print('[rr2] Saving query_results to bigcache: %r' % qres_rowid)
    utool.ensuredir(cache_dir)
    io.smart_save(qrid2_qres, **io_kwargs)
    return qrid2_qres
Example #12
def make_incremental_test_database(ibs_gt, aid_list1, reset):
    """
    Makes a test database. Adds images and annotations but does not transfer
    names. If reset is True, the new database is guaranteed to be built from
    a fresh start.

    Args:
        ibs_gt    (IBEISController):
        aid_list1 (list):
        reset     (bool): if True the test database is completely rebuilt

    Returns:
        IBEISController: ibs2
    """
    import ibeis
    print('make_incremental_test_database. reset=%r' % (reset,))
    aids1_hashid = ut.hashstr_arr(aid_list1)
    prefix = '_INCTEST_' + aids1_hashid + '_'
    dbname2 = prefix + ibs_gt.get_dbname()
    ibs2 = ibeis.opendb(dbname2, allow_newdir=True, delete_ibsdir=reset, use_cache=False)
    # reset if flag specified or no data in ibs2
    if reset or len(ibs2.get_valid_gids()) == 0:
        assert len(ibs2.get_valid_aids())  == 0
        assert len(ibs2.get_valid_gids())  == 0
        assert len(ibs2.get_valid_nids())  == 0
        # Get annotations and their images from database 1
        gid_list1 = ibs_gt.get_annot_gids(aid_list1)
        gpath_list1 = ibs_gt.get_image_paths(gid_list1)
        # Add all images from database 1 to database 2
        gid_list2 = ibs2.add_images(gpath_list1, auto_localize=False)
        # Image UUIDS should be consistent between databases
        image_uuid_list1 = ibs_gt.get_image_uuids(gid_list1)
        image_uuid_list2 = ibs2.get_image_uuids(gid_list2)
        assert image_uuid_list1 == image_uuid_list2
        ut.assert_lists_eq(image_uuid_list1, image_uuid_list2)
    return ibs2
Example #13
def get_qaids_hashid(qreq):
    assert len(qreq.qaids) > 0, 'QueryRequest not populated. len(qaids)=0'
    qaids_hashid = utool.hashstr_arr(qreq.qaids, '_qaids')
    return qaids_hashid
Example #14
def get_qaids_hashid(qreq):
    assert len(qreq.qaids) > 0, 'QueryRequest not populated. len(qaids)=0'
    qaids_hashid = utool.hashstr_arr(qreq.qaids, '_qaids')
    return qaids_hashid
Example #15
def get_flann_cfgstr(ibs, aid_list):
    """ </CYTHE> """
    feat_cfgstr   = ibs.cfg.feat_cfg.get_cfgstr()
    sample_cfgstr = utool.hashstr_arr(aid_list, 'daids')
    cfgstr = '_' + sample_cfgstr + feat_cfgstr
    return cfgstr