Example #1
0
def get_image_detectimg_fpath_list(ibs, gid_list):
    r""" Returns detectimg path list

    Example:
        >>> # ENABLE_DOCTEST
        >>> import ibeis
        >>> from os.path import basename
        >>> from ibeis.algo.preproc.preproc_detectimg import *  # NOQA
        >>> ibs = ibeis.opendb('testdb1')
        >>> valid_gids = ibs.get_valid_gids()
        >>> gid_list = valid_gids[0:2]
        >>> new_gfpath_list = get_image_detectimg_fpath_list(ibs, gid_list)
        >>> result = ('\n'.join(map(basename, new_gfpath_list)))
        >>> target = '\n'.join((
        ...    'reszd_sqrtArea=800_66ec193a-1619-b3b6-216d-1784b4833b61.jpg',
        ...    'reszd_sqrtArea=800_d8903434-942f-e0f5-d6c2-0dcbe3137bf7.jpg'))
        >>> assert result == target, 'got result=\n%s' % result

    """
    utool.assert_all_not_None(gid_list, 'gid_list')
    sqrt_area = ibs.cfg.detect_cfg.detectimg_sqrt_area
    ext_list = ibs.get_image_exts(gid_list)
    uuid_list = ibs.get_image_uuids(gid_list)
    cachedir = ibs.get_detectimg_cachedir()
    # The filename encodes the resize config so different sqrt_area
    # settings do not collide in the cache directory.
    prefix = 'reszd_sqrtArea=' + str(sqrt_area) + '_'
    new_gfpath_list = []
    for uuid_, ext in zip(uuid_list, ext_list):
        new_gfpath_list.append(join(cachedir, prefix + str(uuid_) + ext))
    return new_gfpath_list
Example #2
0
def compute_or_read_annotation_chips(ibs, aid_list, ensure=True):
    """ Reads chips and tries to compute them if they do not exist """
    #print('[preproc_chip] compute_or_read_chips')
    if ensure:
        try:
            utool.assert_all_not_None(aid_list, 'aid_list')
        except AssertionError as ex:
            utool.printex(ex, key_list=['aid_list'])
            raise
    cfpath_list = get_annot_cfpath_list(ibs, aid_list)

    def _read_chips(allow_none):
        # Read each chip path; when allow_none, pass None entries through
        if allow_none:
            return [None if cfpath is None else gtool.imread(cfpath)
                    for cfpath in cfpath_list]
        return [gtool.imread(cfpath) for cfpath in cfpath_list]

    try:
        # In non-ensure mode tolerate missing (None) paths
        chip_list = _read_chips(not ensure)
    except IOError as ex:
        if not utool.QUIET:
            utool.printex(ex, '[preproc_chip] Handing Exception: ')
        # Chips missing on disk: recompute, then retry the read
        ibs.add_chips(aid_list)
        try:
            chip_list = _read_chips(False)
        except IOError:
            print('[preproc_chip] cache must have been deleted from disk')
            compute_and_write_chips_lazy(ibs, aid_list)
            # Try just one more time
            chip_list = _read_chips(False)

    return chip_list
Example #3
0
def get_image_detectimg_fpath_list(ibs, gid_list):
    """ Returns detectimg path list """
    utool.assert_all_not_None(gid_list, 'gid_list')
    ext_list = ibs.get_image_exts(gid_list)
    uuid_list = ibs.get_image_uuids(gid_list)
    cachedir = ibs.get_detectimg_cachedir()
    # One cached resized-image path per gid, keyed by image uuid
    new_gfpath_list = []
    for uuid_, ext in izip(uuid_list, ext_list):
        new_gfpath_list.append(join(cachedir, 'reszd_' + str(uuid_) + ext))
    return new_gfpath_list
Example #4
0
def compute_or_read_annotation_chips(ibs, aid_list, ensure=True, config2_=None,
                                     verbose=False, eager=True):
    r"""
    SUPER HACY FUNCTION. NEED TO DEPRICATE

    Reads the chip image for each annotation, recomputing missing chips on
    IOError with up to two fallback attempts.

    Args:
        ibs (IBEISController): controller used to resolve paths / add chips
        aid_list (list): annotation rowids
        ensure (bool): if True, assert aids are not None and read every path
        config2_ (object): optional chip configuration
        verbose (bool): enables the progress iterator
        eager (bool): unused here; kept for caller compatibility

    Returns:
        list: chip images (None entries possible when ensure=False)

    ----------------------
    Found 1 line(s) in 'ibeis/algo/preproc/preproc_chip.py':
    preproc_chip.py :  25 |def compute_or_read_annotation_chips(ibs, aid_list,
    ensure=True):
    ----------------------
    Found 1 line(s) in 'ibeis/control/manual_chip_funcs.py':
    manual_chip_funcs.py : 313 |    chip_list =
    preproc_chip.compute_or_read_annotation_chips(ibs, aid_list, ensure=ensure)

    """
    if ensure:
        try:
            ut.assert_all_not_None(aid_list, 'aid_list')
        except AssertionError as ex:
            ut.printex(ex, key_list=['aid_list'])
            raise
    nTotal = len(aid_list)
    cfpath_list = make_annot_chip_fpath_list(ibs, aid_list, config2_=config2_)
    # Partial so each attempt gets a fresh progress iterator over the same
    # path list, differing only by label.
    mk_cpath_iter = functools.partial(ut.ProgressIter, cfpath_list,
                                      nTotal=nTotal, enabled=verbose, freq=100)
    try:
        if ensure:
            cfpath_iter = mk_cpath_iter(lbl='reading ensured chips')
            chip_list = [vt.imread(cfpath) for cfpath in cfpath_iter]
            #for cfpath in cfpath_iter:
            #    yield vt.imread(cfpath)
        else:
            # Non-ensure mode tolerates missing (None) paths
            cfpath_iter = mk_cpath_iter(lbl='reading existing chips')
            chip_list = [None if cfpath is None else vt.imread(cfpath) for cfpath in cfpath_iter]
            #for cfpath in cfpath_iter:
            #    yield None if cfpath is None else vt.imread(cfpath)
    except IOError as ex:
        if not ut.QUIET:
            ut.printex(ex, '[preproc_chip] Handing Exception: ', iswarning=True)
        # First fallback: register/compute chips, then retry the read
        ibs.add_annot_chips(aid_list)
        try:
            cfpath_iter = mk_cpath_iter(lbl='reading fallback1 chips')
            chip_list = [vt.imread(cfpath) for cfpath in cfpath_iter]
        except IOError:
            print('[preproc_chip] cache must have been deleted from disk')
            # TODO: WE CAN SEARCH FOR NON EXISTANT PATHS HERE AND CALL
            # ibs.delete_annot_chips
            # Second fallback: force lazy recompute, then one last read
            compute_and_write_chips_lazy(ibs, aid_list)
            # Try just one more time
            cfpath_iter = mk_cpath_iter(lbl='reading fallback2 chips')
            chip_list = [vt.imread(cfpath) for cfpath in cfpath_iter]

    return chip_list
def get_chips(ibs, cid_list, ensure=True):
    """
    Returns:
        chip_list (list): cropped images (numpy arrays) looked up by chip rowid
    """
    from ibeis.model.preproc import preproc_chip
    if ensure:
        try:
            ut.assert_all_not_None(cid_list, 'cid_list')
        except AssertionError as ex:
            ut.printex(ex, 'Invalid cid_list', key_list=['ensure', 'cid_list'])
            raise
    # Map chip rowids to their annotation rowids and delegate the read
    aid_list = ibs.get_chip_aids(cid_list)
    return preproc_chip.compute_or_read_annotation_chips(ibs, aid_list,
                                                         ensure=ensure)
Example #6
0
def get_chips(ibs, cid_list, ensure=True, verbose=False, eager=True, config2_=None):
    r"""
    Returns:
        chip_list (list): a list cropped images in numpy array form by their cid

    Args:
        cid_list (list): chip rowids
        ensure (bool):  eager evaluation if True
        verbose (bool): enables the progress iterator
        eager (bool): if False, returns a lazy generator of chips instead
            of a materialized list
        config2_ (object): optional chip configuration

    RESTful:
        Returns the base64 encoded image of annotation (chip) <aid>  # Documented and routed in ibeis.web app.py
        Method: GET
        URL:    /api/annot/<aid>

    CommandLine:
        python -m ibeis.templates.template_generator --key chip --funcname-filter '\<get_chips\>'

    Returns:
        list: chip_list
    """
    # FIXME: HACK: this should not have to read in config2
    # (unless it needs to compute the chips?)
    from ibeis.algo.preproc import preproc_chip
    if ensure:
        try:
            ut.assert_all_not_None(cid_list, 'cid_list')
        except AssertionError as ex:
            ut.printex(ex, 'Invalid cid_list', key_list=[
                'ensure', 'cid_list'])
            raise
    aid_list = ibs.get_chip_aids(cid_list)
    if eager:
        chip_list = preproc_chip.compute_or_read_annotation_chips(
            ibs, aid_list, ensure=ensure, verbose=verbose, config2_=config2_)
    else:
        import vtool as vt
        cfpath_list = preproc_chip.make_annot_chip_fpath_list(
            ibs, aid_list, config2_=config2_)
        nInput = len(cid_list)
        # BUG FIX: the keyword was misspelled ``Total``; ProgressIter takes
        # ``nTotal`` (as every other call site in this codebase uses).
        chip_list = (
            vt.imread(cfpath)
            for cfpath in ut.ProgressIter(cfpath_list, nTotal=nInput,
                                          lbl='Lazy Reading Chips',
                                          enabled=verbose, freq=100)
        )
    return chip_list
Example #7
0
def get_annot_chips(ibs, aid_list, ensure=True, config2_=None, verbose=False, eager=True):
    r"""
    Args:
        ibs (IBEISController):  ibeis controller object
        aid_list (int):  list of annotation ids
        ensure (bool):  eager evaluation if True
        config2_ (QueryRequest):  query request object with hyper-parameters

    Returns:
        list: chip_list


    CommandLine:
        python -m ibeis.control.manual_chip_funcs --test-get_annot_chips
        python -m ibeis.templates.template_generator --key chip --funcname-filter '\<get_annot_chips\>'

    RESTful:
        Method: GET
        URL:    /api/annot_chip/

    Example:
        >>> # ENABLE_DOCTEST
        >>> from ibeis.control.manual_chip_funcs import *  # NOQA
        >>> import ibeis
        >>> ibs = ibeis.opendb('testdb1')
        >>> aid_list = ibs.get_valid_aids()[0:5]
        >>> ensure = True
        >>> config2_ = None
        >>> chip_list = get_annot_chips(ibs, aid_list, ensure, config2_)
        >>> chip_sum_list = list(map(np.sum, chip_list))
        >>> ut.assert_almost_eq(chip_sum_list, [96053500, 65152954, 67223241, 109358624, 73995960], 2000)
        >>> print(chip_sum_list)
    """
    ut.assert_all_not_None(aid_list, 'aid_list')
    # Resolve the chip rowid for each annotation, then delegate the
    # actual image lookup/read to ibs.get_chips.
    cid_list = ibs.get_annot_chip_rowids(aid_list, ensure=ensure,
                                         config2_=config2_)
    return ibs.get_chips(cid_list, ensure=ensure, verbose=verbose,
                         eager=eager, config2_=config2_)
    def dummy_preproc_kpts(depc, chip_rowids, config=None):
        # Toy keypoint generator used to exercise the dependency cache:
        # yields a synthetic (kpts, num) pair per chip rowid.
        config = {} if config is None else config
        print('config = %r' % (config, ))
        adapt_shape = config['adapt_shape']
        print('[preproc] Computing kpts')

        ut.assert_all_not_None(chip_rowids, 'chip_rowids')
        # This is in here to attempt to trigger a failure of the chips dont
        # exist and the feature cache is called.
        chip_fpath_list = depc.get_native('chip', chip_rowids, 'chip',
                                          read_extern=False)
        print('computing featurse from chip_fpath_list = %r' %
              (chip_fpath_list, ))

        # Base fill depends on the adapt_shape flag; rowid offsets the values
        make_base = np.zeros if adapt_shape else np.ones
        for rowid in chip_rowids:
            kpts = make_base((7 + rowid, 6)) + rowid
            yield kpts, len(kpts)
Example #9
0
def add_annot_chunk(ibs_gt, ibs2, aids_chunk1, aid1_to_aid2):
    """
    adds annotations to the temporary database and prevents duplicate
    additions.

    aids_chunk1 = aid_list1

    Args:
        ibs_gt       (IBEISController): source (ground-truth) controller
        ibs2         (IBEISController): destination controller
        aids_chunk1  (list): annotation rowids in ibs_gt to copy over
        aid1_to_aid2 (dict): mapping from ibs_gt aids to ibs2 aids;
            updated in place as a side effect

    Returns:
        list: aids_chunk2
    """
    # Visual info
    guuids_chunk1 = ibs_gt.get_annot_image_uuids(aids_chunk1)
    verts_chunk1  = ibs_gt.get_annot_verts(aids_chunk1)
    thetas_chunk1 = ibs_gt.get_annot_thetas(aids_chunk1)
    # Non-name semantic info
    species_chunk1 = ibs_gt.get_annot_species_texts(aids_chunk1)
    # Images are matched between databases by their uuids
    gids_chunk2 = ibs2.get_image_gids_from_uuid(guuids_chunk1)
    ut.assert_all_not_None(aids_chunk1, 'aids_chunk1')
    ut.assert_all_not_None(guuids_chunk1, 'guuids_chunk1')
    try:
        ut.assert_all_not_None(gids_chunk2, 'gids_chunk2')
    except Exception as ex:
        #index = ut.get_first_None_position(gids_chunk2)
        #set(ibs2.get_valid_gids()).difference(set(gids_chunk2))
        ut.printex(ex, keys=['gids_chunk2'])
        #ut.embed()
        #raise
    # NOTE(review): the exception above is swallowed (raise is commented
    # out), so add_annots may receive None gids — presumably intentional
    # for debugging; confirm before relying on this path.
    # Add this new unseen test case to the database
    aids_chunk2 = ibs2.add_annots(gids_chunk2,
                                  species_list=species_chunk1,
                                  vert_list=verts_chunk1,
                                  theta_list=thetas_chunk1,
                                  prevent_visual_duplicates=True)
    def register_annot_mapping(aids_chunk1, aids_chunk2, aid1_to_aid2):
        """
        called by add_annot_chunk

        Records the aid correspondence in aid1_to_aid2; asserts consistency
        when an aid1 was already registered.
        """
        # Should be 1 to 1
        for aid1, aid2 in zip(aids_chunk1, aids_chunk2):
            if aid1 in aid1_to_aid2:
                assert aid1_to_aid2[aid1] == aid2
            else:
                aid1_to_aid2[aid1] = aid2
    # Register the mapping from ibs_gt to ibs2
    register_annot_mapping(aids_chunk1, aids_chunk2, aid1_to_aid2)
    print('Added: aids_chunk2=%s' % (ut.truncate_str(repr(aids_chunk2), maxlen=60),))
    return aids_chunk2
def get_injured_sharks():
    """
    Exploratory script: scrapes whaleshark.org keyword indexes for
    injury-related images, downloads them, dedupes by file content, and
    ingests the survivors into a local 'WS_Injury' wbia database.  The
    trailing sections (overlap-matrix plot, HOG visualization) are
    guarded by ``if False`` or interactive iteration.

    >>> from wbia.scripts.getshark import *  # NOQA
    """
    import requests

    # --- Fetch the list of available keyword indexes ---
    url = 'http://www.whaleshark.org/getKeywordImages.jsp'
    resp = requests.get(url)
    assert resp.status_code == 200
    keywords = resp.json()['keywords']
    key_list = ut.take_column(keywords, 'indexName')
    key_to_nice = {k['indexName']: k['readableName'] for k in keywords}

    # Keep only keys whose name suggests an injury
    injury_patterns = [
        'injury',
        'net',
        'hook',
        'trunc',
        'damage',
        'scar',
        'nicks',
        'bite',
    ]

    injury_keys = [
        key for key in key_list if any([pat in key for pat in injury_patterns])
    ]
    noninjury_keys = ut.setdiff(key_list, injury_keys)
    injury_nice = ut.lmap(lambda k: key_to_nice[k], injury_keys)  # NOQA
    noninjury_nice = ut.lmap(lambda k: key_to_nice[k], noninjury_keys)  # NOQA
    key_list = injury_keys

    # --- Fetch the image records for each injury keyword ---
    keyed_images = {}
    for key in ut.ProgIter(key_list, lbl='reading index', bs=True):
        key_url = url + '?indexName={indexName}'.format(indexName=key)
        key_resp = requests.get(key_url)
        assert key_resp.status_code == 200
        key_imgs = key_resp.json()['images']
        keyed_images[key] = key_imgs

    key_hist = {key: len(imgs) for key, imgs in keyed_images.items()}
    key_hist = ut.sort_dict(key_hist, 'vals')
    logger.info(ut.repr3(key_hist))
    nice_key_hist = ut.map_dict_keys(lambda k: key_to_nice[k], key_hist)
    nice_key_hist = ut.sort_dict(nice_key_hist, 'vals')
    logger.info(ut.repr3(nice_key_hist))

    # --- Measure URL overlap between keyword pairs ---
    key_to_urls = {
        key: ut.take_column(vals, 'url')
        for key, vals in keyed_images.items()
    }
    overlaps = {}
    import itertools

    overlap_img_list = []
    for k1, k2 in itertools.combinations(key_to_urls.keys(), 2):
        overlap_imgs = ut.isect(key_to_urls[k1], key_to_urls[k2])
        num_overlap = len(overlap_imgs)
        overlaps[(k1, k2)] = num_overlap
        overlaps[(k1, k1)] = len(key_to_urls[k1])
        if num_overlap > 0:
            # logger.info('[%s][%s], overlap=%r' % (k1, k2, num_overlap))
            overlap_img_list.extend(overlap_imgs)

    all_img_urls = list(set(ut.flatten(key_to_urls.values())))
    num_all = len(all_img_urls)  # NOQA
    logger.info('num_all = %r' % (num_all, ))

    # Determine super-categories
    categories = ['nicks', 'scar', 'trunc']

    # Force these keys into these categories
    key_to_cat = {'scarbite': 'other_injury'}

    cat_to_keys = ut.ddict(list)

    # Assign each key to every matching category; unmatched keys fall
    # back to 'other_injury'
    for key in key_to_urls.keys():
        flag = 1
        if key in key_to_cat:
            cat = key_to_cat[key]
            cat_to_keys[cat].append(key)
            continue
        for cat in categories:
            if cat in key:
                cat_to_keys[cat].append(key)
                flag = 0
        if flag:
            cat = 'other_injury'
            cat_to_keys[cat].append(key)

    cat_urls = ut.ddict(list)
    for cat, keys in cat_to_keys.items():
        for key in keys:
            cat_urls[cat].extend(key_to_urls[key])

    cat_hist = {}
    for cat in list(cat_urls.keys()):
        cat_urls[cat] = list(set(cat_urls[cat]))
        cat_hist[cat] = len(cat_urls[cat])

    logger.info(ut.repr3(cat_to_keys))
    logger.info(ut.repr3(cat_hist))

    key_to_cat = dict([(val, key) for key, vals in cat_to_keys.items()
                       for val in vals])

    # ingestset = {
    #    '__class__': 'ImageSet',
    #    'images': ut.ddict(dict)
    # }
    # for key, key_imgs in keyed_images.items():
    #    for imgdict in key_imgs:
    #        url = imgdict['url']
    #        encid = imgdict['correspondingEncounterNumber']
    #        # Make structure
    #        encdict = encounters[encid]
    #        encdict['__class__'] = 'Encounter'
    #        imgdict = ut.delete_keys(imgdict.copy(), ['correspondingEncounterNumber'])
    #        imgdict['__class__'] = 'Image'
    #        cat = key_to_cat[key]
    #        annotdict = {'relative_bbox': [.01, .01, .98, .98], 'tags': [cat, key]}
    #        annotdict['__class__'] = 'Annotation'

    #        # Ensure structures exist
    #        encdict['images'] = encdict.get('images', [])
    #        imgdict['annots'] = imgdict.get('annots', [])

    #        # Add an image to this encounter
    #        encdict['images'].append(imgdict)
    #        # Add an annotation to this image
    #        imgdict['annots'].append(annotdict)

    # # http://springbreak.wildbook.org/rest/org.ecocean.Encounter/1111
    # get_enc_url = 'http://www.whaleshark.org/rest/org.ecocean.Encounter/%s' % (encid,)
    # resp = requests.get(get_enc_url)
    # logger.info(ut.repr3(encdict))
    # logger.info(ut.repr3(encounters))

    # Download the files to the local disk
    # fpath_list =

    all_urls = ut.unique(
        ut.take_column(
            ut.flatten(
                ut.dict_subset(keyed_images,
                               ut.flatten(cat_to_keys.values())).values()),
            'url',
        ))

    dldir = ut.truepath('~/tmpsharks')
    from os.path import commonprefix, basename  # NOQA

    # Flatten URL paths into filesystem-safe filenames
    prefix = commonprefix(all_urls)
    suffix_list = [url_[len(prefix):] for url_ in all_urls]
    fname_list = [suffix.replace('/', '--') for suffix in suffix_list]

    fpath_list = []
    for url, fname in ut.ProgIter(zip(all_urls, fname_list),
                                  lbl='downloading imgs',
                                  freq=1):
        fpath = ut.grab_file_url(url,
                                 download_dir=dldir,
                                 fname=fname,
                                 verbose=False)
        fpath_list.append(fpath)

    # Make sure we keep orig info
    # url_to_keys = ut.ddict(list)
    # Accumulate every scraped property per URL (values become lists)
    url_to_info = ut.ddict(dict)
    for key, imgdict_list in keyed_images.items():
        for imgdict in imgdict_list:
            url = imgdict['url']
            info = url_to_info[url]
            for k, v in imgdict.items():
                info[k] = info.get(k, [])
                info[k].append(v)
            info['keys'] = info.get('keys', [])
            info['keys'].append(key)
            # url_to_keys[url].append(key)

    info_list = ut.take(url_to_info, all_urls)
    for info in info_list:
        if len(set(info['correspondingEncounterNumber'])) > 1:
            assert False, 'url with two different encounter nums'
    # Combine duplicate tags

    # Dedupe by file content (uuid derived from file bytes)
    hashid_list = [
        ut.get_file_uuid(fpath_, stride=8)
        for fpath_ in ut.ProgIter(fpath_list, bs=True)
    ]
    groupxs = ut.group_indices(hashid_list)[1]

    # Group properties by duplicate images
    # groupxs = [g for g in groupxs if len(g) > 1]
    fpath_list_ = ut.take_column(ut.apply_grouping(fpath_list, groupxs), 0)
    url_list_ = ut.take_column(ut.apply_grouping(all_urls, groupxs), 0)
    info_list_ = [
        ut.map_dict_vals(ut.flatten, ut.dict_accum(*info_))
        for info_ in ut.apply_grouping(info_list, groupxs)
    ]

    encid_list_ = [
        ut.unique(info_['correspondingEncounterNumber'])[0]
        for info_ in info_list_
    ]
    keys_list_ = [ut.unique(info_['keys']) for info_ in info_list_]
    cats_list_ = [ut.unique(ut.take(key_to_cat, keys)) for keys in keys_list_]

    clist = ut.ColumnLists({
        'gpath': fpath_list_,
        'url': url_list_,
        'encid': encid_list_,
        'key': keys_list_,
        'cat': cats_list_,
    })

    # for info_ in ut.apply_grouping(info_list, groupxs):
    #    info = ut.dict_accum(*info_)
    #    info = ut.map_dict_vals(ut.flatten, info)
    #    x = ut.unique(ut.flatten(ut.dict_accum(*info_)['correspondingEncounterNumber']))
    #    if len(x) > 1:
    #        info = info.copy()
    #        del info['keys']
    #        logger.info(ut.repr3(info))

    flags = ut.lmap(ut.fpath_has_imgext, clist['gpath'])
    clist = clist.compress(flags)

    # --- Ingest into the local wbia database ---
    import wbia

    ibs = wbia.opendb('WS_Injury', allow_newdir=True)

    gid_list = ibs.add_images(clist['gpath'])
    clist['gid'] = gid_list

    failed_flags = ut.flag_None_items(clist['gid'])
    logger.info('# failed %s' % (sum(failed_flags), ))
    passed_flags = ut.not_list(failed_flags)
    clist = clist.compress(passed_flags)
    ut.assert_all_not_None(clist['gid'])
    # ibs.get_image_uris_original(clist['gid'])
    ibs.set_image_uris_original(clist['gid'], clist['url'], overwrite=True)

    # ut.zipflat(clist['cat'], clist['key'])
    if False:
        # Can run detection instead
        clist['tags'] = ut.zipflat(clist['cat'])
        aid_list = ibs.use_images_as_annotations(clist['gid'],
                                                 adjust_percent=0.01,
                                                 tags_list=clist['tags'])
        aid_list

    # --- Interactive HOG / chip visualization per annotation ---
    import wbia.plottool as pt
    from wbia import core_annots

    pt.qt4ensure()
    # annots = ibs.annots()
    # aids = [1, 2]
    # ibs.depc_annot.get('hog', aids , 'hog')
    # ibs.depc_annot.get('chip', aids, 'img')
    for aid in ut.InteractiveIter(ibs.get_valid_aids()):
        hogs = ibs.depc_annot.d.get_hog_hog([aid])
        chips = ibs.depc_annot.d.get_chips_img([aid])
        chip = chips[0]
        hogimg = core_annots.make_hog_block_image(hogs[0])
        pt.clf()
        pt.imshow(hogimg, pnum=(1, 2, 1))
        pt.imshow(chip, pnum=(1, 2, 2))
        fig = pt.gcf()
        fig.show()
        fig.canvas.draw()

    # logger.info(len(groupxs))

    # if False:
    # groupxs = ut.find_duplicate_items(ut.lmap(basename, suffix_list)).values()
    # logger.info(ut.repr3(ut.apply_grouping(all_urls, groupxs)))
    #    # FIX
    #    for fpath, fname in zip(fpath_list, fname_list):
    #        if ut.checkpath(fpath):
    #            ut.move(fpath, join(dirname(fpath), fname))
    #            logger.info('fpath = %r' % (fpath,))

    # import wbia
    # from wbia.dbio import ingest_dataset
    # dbdir = wbia.sysres.lookup_dbdir('WS_ALL')
    # self = ingest_dataset.Ingestable2(dbdir)

    if False:
        # Show overlap matrix
        import wbia.plottool as pt
        import pandas as pd
        import numpy as np

        dict_ = overlaps
        s = pd.Series(dict_, index=pd.MultiIndex.from_tuples(overlaps))
        df = s.unstack()
        # Symmetrize the upper-triangular overlap counts
        lhs, rhs = df.align(df.T)
        df = lhs.add(rhs, fill_value=0).fillna(0)

        label_texts = df.columns.values

        def label_ticks(label_texts):
            # Apply truncated, rotated tick labels on both axes
            import wbia.plottool as pt

            truncated_labels = [repr(lbl[0:100]) for lbl in label_texts]
            ax = pt.gca()
            ax.set_xticks(list(range(len(label_texts))))
            ax.set_xticklabels(truncated_labels)
            [lbl.set_rotation(-55) for lbl in ax.get_xticklabels()]
            [
                lbl.set_horizontalalignment('left')
                for lbl in ax.get_xticklabels()
            ]

            # xgrid, ygrid = np.meshgrid(range(len(label_texts)), range(len(label_texts)))
            # pt.plot_surface3d(xgrid, ygrid, disjoint_mat)
            ax.set_yticks(list(range(len(label_texts))))
            ax.set_yticklabels(truncated_labels)
            [
                lbl.set_horizontalalignment('right')
                for lbl in ax.get_yticklabels()
            ]
            [
                lbl.set_verticalalignment('center')
                for lbl in ax.get_yticklabels()
            ]
            # [lbl.set_rotation(20) for lbl in ax.get_yticklabels()]

        # df = df.sort(axis=0)
        # df = df.sort(axis=1)

        sortx = np.argsort(df.sum(axis=1).values)[::-1]
        df = df.take(sortx, axis=0)
        df = df.take(sortx, axis=1)

        fig = pt.figure(fnum=1)
        fig.clf()
        mat = df.values.astype(np.int32)
        mat[np.diag_indices(len(mat))] = 0
        # NOTE(review): np.bool was removed in NumPy>=1.24; this line needs
        # plain ``bool`` on modern NumPy.
        vmax = mat[(1 - np.eye(len(mat))).astype(np.bool)].max()
        import matplotlib.colors

        norm = matplotlib.colors.Normalize(vmin=0, vmax=vmax, clip=True)
        pt.plt.imshow(mat, cmap='hot', norm=norm, interpolation='none')
        pt.plt.colorbar()
        pt.plt.grid('off')
        label_ticks(label_texts)
        fig.tight_layout()

    # overlap_df = pd.DataFrame.from_dict(overlap_img_list)

    class TmpImage(ut.NiceRepr):
        pass

    from skimage.feature import hog
    from skimage import data, color, exposure
    import wbia.plottool as pt

    image2 = color.rgb2gray(data.astronaut())  # NOQA

    fpath = './GOPR1120.JPG'

    import vtool as vt

    for fpath in [fpath]:
        """
        http://scikit-image.org/docs/dev/auto_examples/plot_hog.html
        """

        image = vt.imread(fpath, grayscale=True)
        image = pt.color_funcs.to_base01(image)

        fig = pt.figure(fnum=2)
        # NOTE(review): skimage renamed ``visualise`` to ``visualize``;
        # this keyword fails on recent scikit-image.
        fd, hog_image = hog(
            image,
            orientations=8,
            pixels_per_cell=(16, 16),
            cells_per_block=(1, 1),
            visualise=True,
        )

        fig, (ax1, ax2) = pt.plt.subplots(1,
                                          2,
                                          figsize=(8, 4),
                                          sharex=True,
                                          sharey=True)

        ax1.axis('off')
        ax1.imshow(image, cmap=pt.plt.cm.gray)
        ax1.set_title('Input image')
        ax1.set_adjustable('box-forced')

        # Rescale histogram for better display
        hog_image_rescaled = exposure.rescale_intensity(hog_image,
                                                        in_range=(0, 0.02))

        ax2.axis('off')
        ax2.imshow(hog_image_rescaled, cmap=pt.plt.cm.gray)
        ax2.set_title('Histogram of Oriented Gradients')
        ax1.set_adjustable('box-forced')
        pt.plt.show()
Example #11
0
def generate_feat_properties(ibs, cid_list, config2_=None, nInput=None):
    r"""
    Computes features and yields results asynchronously: TODO: Remove IBEIS from
    this equation. Move the firewall towards the controller

    Args:
        ibs (IBEISController):
        cid_list (list): chip rowids to compute features for
        config2_ (object): optional feature configuration; when None the
            config is pulled from the controller
        nInput (None): number of inputs; defaults to len(cid_list)

    Returns:
        generator : generates param tups

    SeeAlso:
        ~/code/ibeis_cnn/ibeis_cnn/_plugin.py

    CommandLine:
        python -m ibeis.algo.preproc.preproc_feat --test-generate_feat_properties:0 --show
        python -m ibeis.algo.preproc.preproc_feat --test-generate_feat_properties:1

    Example:
        >>> # DISABLE_DOCTEST
        >>> from ibeis.algo.preproc.preproc_feat import *  # NOQA
        >>> import ibeis
        >>> ibs = ibeis.opendb('testdb1')
        >>> config2_ = ibs.new_query_params({})
        >>> nInput = None
        >>> aid_list = ibs.get_valid_aids()[::2]
        >>> ut.assert_all_not_None(aid_list, 'aid_list')
        >>> cid_list = ibs.get_annot_chip_rowids(aid_list, config2_=config2_)
        >>> ut.assert_all_not_None(cid_list, 'cid_list')
        >>> featgen = generate_feat_properties(ibs, cid_list, config2_, nInput)
        >>> feat_list = list(featgen)
        >>> assert len(feat_list) == len(aid_list)
        >>> (nFeat, kpts, vecs) = feat_list[0]
        >>> assert nFeat == len(kpts) and nFeat == len(vecs)
        >>> assert kpts.shape[1] == 6
        >>> assert vecs.shape[1] == 128
        >>> ut.quit_if_noshow()
        >>> import plottool as pt
        >>> chip_fpath = ibs.get_annot_chip_fpath(aid_list[0], config2_=config2_)
        >>> pt.interact_keypoints.ishow_keypoints(chip_fpath, kpts, vecs)
        >>> ut.show_if_requested()

    Example:
        >>> # DISABLE_DOCTEST
        >>> from ibeis.algo.preproc.preproc_feat import *  # NOQA
        >>> import ibeis
        >>> ibs = ibeis.opendb(defaultdb='testdb1')
        >>> cfgdict = {}
        >>> cfgdict['feat_type'] = 'hesaff+siam128'
        >>> qreq_ = ibs.new_query_request([1], [1, 2, 3], cfgdict)
        >>> query_config2 = qreq_.get_external_query_config2()
        >>> data_config2 = qreq_.get_external_data_config2()
        >>> cid_list = ibs.get_annot_chip_rowids(ibs.get_valid_aids())
        >>> config2_ = query_config2
        >>> nInput = None
        >>> featgen = generate_feat_properties(ibs, cid_list, config2_, nInput)
        >>> result = list(featgen)
        >>> print(result)

    Example:
        >>> # DISABLE_DOCTEST
        >>> from ibeis.algo.preproc.preproc_feat import *  # NOQA
        >>> import ibeis
        >>> ibs = ibeis.opendb('PZ_MTEST')
        >>> config2_ = ibs.new_query_params({'affine_invariance': False, 'bgmethod': 'cnn'})
        >>> nInput = None
        >>> aid_list = ibs.get_valid_aids()[0:4]
        >>> ut.assert_all_not_None(aid_list, 'aid_list')
        >>> cid_list = ibs.get_annot_chip_rowids(aid_list, config2_=config2_)
        >>> ut.assert_all_not_None(cid_list, 'cid_list')
        >>> featgen = generate_feat_properties(ibs, cid_list, config2_, nInput)
        >>> feat_list = list(featgen)
        >>> assert len(feat_list) == len(aid_list)
        >>> (nFeat, kpts, vecs) = feat_list[0]
        >>> assert nFeat == len(kpts) and nFeat == len(vecs)
        >>> assert kpts.shape[1] == 6
        >>> assert vecs.shape[1] == 128
        >>> ut.quit_if_noshow()
        >>> import plottool as pt
        >>> chip_fpath = ibs.get_annot_chip_fpath(aid_list[0], config2_=config2_)
        >>> pt.interact_keypoints.ishow_keypoints(chip_fpath, kpts, vecs)
        >>> ut.show_if_requested()

    Ignore:
        # STARTBLOCK
        import plottool as pt
        chip_fpath_list = ibs.get_chip_fpath(cid_list)
        fpath_list = list(ut.interleave((probchip_fpath_list, chip_fpath_list)))
        iteract_obj = pt.interact_multi_image.MultiImageInteraction(fpath_list, nPerPage=4)
        ut.show_if_requested()
        # ENDBLOCK
    """

    if nInput is None:
        nInput = len(cid_list)
    if config2_ is not None:
        # Get config from config2_ object
        # print('id(config2_) = ' + str(id(config2_)))
        feat_cfgstr = config2_.get("feat_cfgstr")
        hesaff_params = config2_.get("hesaff_params")
        feat_type = config2_.get("feat_type")
        bgmethod = config2_.get("bgmethod")
        assert feat_cfgstr is not None
        assert hesaff_params is not None
    else:
        # TODO: assert False here
        # Get config from IBEIS controller
        bgmethod = ibs.cfg.feat_cfg.bgmethod
        feat_type = ibs.cfg.feat_cfg.feat_type
        feat_cfgstr = ibs.cfg.feat_cfg.get_cfgstr()
        hesaff_params = ibs.cfg.feat_cfg.get_hesaff_params()

    ut.assert_all_not_None(cid_list, "cid_list")
    chip_fpath_list = ibs.get_chip_fpath(cid_list, check_external_storage=True)

    # Probability-chip paths are only needed when a background method is set
    if bgmethod is not None:
        aid_list = ibs.get_chip_aids(cid_list)
        probchip_fpath_list = ibs.get_annot_probchip_fpath(aid_list)
    else:
        probchip_fpath_list = (None for _ in range(nInput))

    if ut.NOT_QUIET:
        print("[preproc_feat] feat_cfgstr = %s" % feat_cfgstr)
        if ut.VERYVERBOSE:
            print("hesaff_params = " + ut.dict_str(hesaff_params))

    # Dispatch on the configured feature type
    if feat_type == "hesaff+sift":
        if USE_OPENMP:
            # Use Avi's openmp parallelization
            assert bgmethod is None, "not implemented"
            featgen_mp = gen_feat_openmp(cid_list, chip_fpath_list, hesaff_params)
            featgen = ut.ProgressIter(featgen_mp, lbl="openmp feat")
        else:
            # Multiprocessing parallelization
            featgen = extract_hesaff_sift_feats(
                chip_fpath_list, probchip_fpath_list, hesaff_params=hesaff_params, nInput=nInput, ordered=True
            )
    elif feat_type == "hesaff+siam128":
        from ibeis_cnn import _plugin

        assert bgmethod is None, "not implemented"
        featgen = _plugin.generate_siam_l2_128_feats(ibs, cid_list, config2_=config2_)
    else:
        raise AssertionError("unknown feat_type=%r" % (feat_type,))

    # Re-yield so this function is itself a generator regardless of backend
    for nFeat, kpts, vecs in featgen:
        yield (nFeat, kpts, vecs)
Example #12
0
def compute_or_read_annotation_chips(ibs,
                                     aid_list,
                                     ensure=True,
                                     config2_=None,
                                     verbose=False,
                                     eager=True):
    r"""
    SUPER HACY FUNCTION. NEED TO DEPRICATE

    Read the chip images for ``aid_list``, falling back to (re)computation
    when the cached files are missing from disk.

    Args:
        ibs (IBEISController): controller used to locate / recompute chips
        aid_list (list): annotation rowids
        ensure (bool): when True missing chips are computed; when False
            missing entries are returned as None
        config2_ (dict): chip configuration override
        verbose (bool): enables progress reporting
        eager (bool): unused; kept for call-site compatibility

    Returns:
        list: chip images (may contain Nones when ``ensure`` is False)
    """
    if ensure:
        try:
            ut.assert_all_not_None(aid_list, 'aid_list')
        except AssertionError as ex:
            ut.printex(ex, key_list=['aid_list'])
            raise
    num_aids = len(aid_list)
    chip_fpaths = make_annot_chip_fpath_list(ibs, aid_list, config2_=config2_)

    def _fpath_progiter(lbl):
        # Wrap the path list in a (possibly disabled) progress iterator
        return ut.ProgressIter(chip_fpaths, nTotal=num_aids,
                               enabled=verbose, freq=100, lbl=lbl)

    try:
        if ensure:
            chip_list = [vt.imread(fpath)
                         for fpath in _fpath_progiter('reading ensured chips')]
        else:
            chip_list = [vt.imread(fpath) if fpath is not None else None
                         for fpath in _fpath_progiter('reading existing chips')]
    except IOError as ex:
        if not ut.QUIET:
            ut.printex(ex,
                       '[preproc_chip] Handing Exception: ',
                       iswarning=True)
        # First fallback: re-register the chips in sql and retry the read
        ibs.add_annot_chips(aid_list)
        try:
            chip_list = [vt.imread(fpath)
                         for fpath in _fpath_progiter('reading fallback1 chips')]
        except IOError:
            print('[preproc_chip] cache must have been deleted from disk')
            # TODO: WE CAN SEARCH FOR NON EXISTANT PATHS HERE AND CALL
            # ibs.delete_annot_chips
            # Second fallback: force chips back onto disk and try once more
            compute_and_write_chips_lazy(ibs, aid_list)
            chip_list = [vt.imread(fpath)
                         for fpath in _fpath_progiter('reading fallback2 chips')]

    return chip_list
Exemple #13
0
def add_annot_chips(ibs, aid_list, config2_=None, verbose=not ut.QUIET, return_num_dirty=False):
    r"""
    annot.chip.add(aid_list)

    CRITICAL FUNCTION MUST EXIST FOR ALL DEPENDANTS
    Adds / ensures / computes a dependant property

    Args:
         aid_list

    Returns:
        returns chip_rowid_list of added (or already existing chips)

    TemplateInfo:
        python -m ibeis.templates.template_generator --key chip --funcname-filter "\<add_annot_chips\>" --modfname=manual_chip_funcs
        python -m ibeis.templates.template_generator --key chip --modfname=manual_chip_funcs --funcname-filter "\<add_annot_chip"

        Tadder_pl_dependant
        parent = annot
        leaf = chip

    CommandLine:
        python -m ibeis.control.manual_chip_funcs --test-add_annot_chips

    RESTful:
        Method: POST
        URL:    /api/annot_chip/

    Example0:
        >>> # ENABLE_DOCTEST
        >>> from ibeis.control.manual_chip_funcs import *  # NOQA
        >>> ibs, config2_ = testdata_ibs()
        >>> aid_list = ibs._get_all_aids()[::3]
        >>> chip_rowid_list = ibs.add_annot_chips(aid_list, config2_=config2_)
        >>> assert len(chip_rowid_list) == len(aid_list)
        >>> ut.assert_all_not_None(chip_rowid_list)

    Example1:
        >>> # ENABLE_DOCTEST
        >>> from ibeis.control.manual_chip_funcs import *  # NOQA
        >>> ibs, config2_ = testdata_ibs()
        >>> aid_list = ibs._get_all_aids()[0:10]
        >>> sub_aid_list1 = aid_list[0:6]
        >>> sub_aid_list2 = aid_list[5:7]
        >>> sub_aid_list3 = aid_list[0:7]
        >>> sub_chip_rowid_list1 = ibs.get_annot_chip_rowids(sub_aid_list1, config2_=config2_, ensure=True)
        >>> ibs.get_annot_chip_rowids(sub_aid_list1, config2_=config2_, ensure=True)
        >>> sub_chip_rowid_list1, num_dirty0 = ibs.add_annot_chips(sub_aid_list1, config2_=config2_, return_num_dirty=True)
        >>> assert num_dirty0 == 0
        >>> ut.assert_all_not_None(sub_chip_rowid_list1)
        >>> ibs.delete_annot_chips(sub_aid_list2)
        >>> #ibs.delete_annot_chip(sub_aid_list2)?
        >>> sub_chip_rowid_list3 = ibs.get_annot_chip_rowids(sub_aid_list3, config2_=config2_, ensure=False)
        >>> # Only the last two should be None
        >>> ut.assert_all_not_None(sub_chip_rowid_list3[0:5], 'sub_chip_rowid_list3[0:5])')
        >>> assert sub_chip_rowid_list3[5:7] == [None, None]
        >>> sub_chip_rowid_list3_ensured, num_dirty1 = ibs.add_annot_chips(sub_aid_list3, config2_=config2_, return_num_dirty=True)
        >>> assert num_dirty1 == 2, 'Only two params should have been computed here'
        >>> ut.assert_all_not_None(sub_chip_rowid_list3_ensured)
    """
    # Deferred import (presumably avoids a circular dependency at module load)
    from ibeis.algo.preproc import preproc_chip
    ut.assert_all_not_None(aid_list, ' annot_rowid_list')
    # Get requested configuration id
    config_rowid = ibs.get_chip_config_rowid(config2_=config2_)
    # Find leaf rowids that need to be computed
    initial_chip_rowid_list = get_annot_chip_rowids_(ibs, aid_list, config2_=config2_)
    # Get corresponding "dirty" parent rowids (annots whose chip rowid is None)
    isdirty_list = ut.flag_None_items(initial_chip_rowid_list)
    dirty_aid_list = ut.compress(aid_list, isdirty_list)
    num_dirty = len(dirty_aid_list)
    num_total = len(aid_list)
    if num_dirty > 0:
        if verbose:
            fmtstr = '[add_annot_chips] adding %d / %d new chip for config_rowid=%r'
            print(fmtstr % (num_dirty, num_total, config_rowid))
        # Dependant columns do not need true from_superkey getters.
        # We can use the Tgetter_pl_dependant_rowids_ instead
        get_rowid_from_superkey = functools.partial(
            ibs.get_annot_chip_rowids_, config2_=config2_)
        # Lazily compute properties for only the missing chips
        proptup_gen = preproc_chip.generate_chip_properties(ibs, dirty_aid_list, config2_=config2_)
        dirty_params_iter = (
            (aid, config_rowid, chip_uri, chip_width, chip_height)
            for aid, (chip_uri, chip_width, chip_height,) in
            zip(dirty_aid_list, proptup_gen)
        )
        colnames = ['annot_rowid', 'config_rowid',
                    'chip_uri', 'chip_width', 'chip_height']
        #chip_rowid_list = ibs.dbcache.add_cleanly(const.CHIP_TABLE, colnames, dirty_params_iter, get_rowid_from_superkey)
        # Insert in chunks; Windows gets smaller chunks (presumably to limit
        # per-call resource pressure -- TODO confirm)
        CHUNKED_ADD = True
        if CHUNKED_ADD:
            chunksize = 32 if ut.WIN32 else 128
            for dirty_params_chunk in ut.ichunks(dirty_params_iter, chunksize=chunksize):
                nInput = len(dirty_params_chunk)
                ibs.dbcache._add(
                    const.CHIP_TABLE, colnames, dirty_params_chunk, nInput=nInput)
        else:
            nInput = num_dirty
            ibs.dbcache._add(
                const.CHIP_TABLE, colnames, dirty_params_iter, nInput=nInput)
        # Now that the dirty params are added get the correct order of rowids
        chip_rowid_list = get_rowid_from_superkey(aid_list)
    else:
        chip_rowid_list = initial_chip_rowid_list
    if return_num_dirty:
        return chip_rowid_list, num_dirty
    return chip_rowid_list
def add_annot_chips(ibs, aid_list, qreq_=None):
    """ annot.chip.add(aid_list)

    NOTE: legacy ``qreq_``-based variant of this adder.

    CRITICAL FUNCTION MUST EXIST FOR ALL DEPENDANTS
    Adds / ensures / computes a dependant property

    Args:
         aid_list

    Returns:
        returns chip_rowid_list of added (or already existing chips)

    TemplateInfo:
        Tadder_pl_dependant
        parent = annot
        leaf = chip

    CommandLine:
        python -m ibeis.control.manual_chip_funcs --test-add_annot_chips

    Example1:
        >>> # ENABLE_DOCTEST
        >>> from ibeis.control.manual_chip_funcs import *  # NOQA
        >>> ibs, qreq_ = testdata_ibs()
        >>> aid_list = ibs._get_all_aids()[::3]
        >>> chip_rowid_list = ibs.add_annot_chips(aid_list, qreq_=qreq_)
        >>> assert len(chip_rowid_list) == len(aid_list)
        >>> ut.assert_all_not_None(chip_rowid_list)

    Example2:
        >>> # ENABLE_DOCTEST
        >>> from ibeis.control.manual_chip_funcs import *  # NOQA
        >>> ibs, qreq_ = testdata_ibs()
        >>> aid_list = ibs._get_all_aids()[0:10]
        >>> sub_aid_list1 = aid_list[0:6]
        >>> sub_aid_list2 = aid_list[5:7]
        >>> sub_aid_list3 = aid_list[0:7]
        >>> sub_cid_list1 = ibs.get_annot_chip_rowids(sub_aid_list1, qreq_=qreq_, ensure=True)
        >>> ut.assert_all_not_None(sub_cid_list1)
        >>> ibs.delete_annot_chips(sub_aid_list2)
        >>> sub_cid_list3 = ibs.get_annot_chip_rowids(sub_aid_list3, qreq_=qreq_, ensure=False)
        >>> # Only the last two should be None
        >>> ut.assert_all_not_None(sub_cid_list3)
        >>> assert sub_cid_list3[5:7] == [None, None]
        >>> sub_cid_list3_ensured = ibs.get_annot_chip_rowids(sub_aid_list3, qreq_=qreq_, ensure=True)
        >>> # Only two params should have been computed here
        >>> ut.assert_all_not_None(sub_cid_list3_ensured)

    """
    # NOTE(review): imports from ibeis.model, not ibeis.algo -- legacy module
    # path; verify it still exists in this codebase
    from ibeis.model.preproc import preproc_chip
    ut.assert_all_not_None(aid_list, 'aid_list')
    # Get requested configuration id
    config_rowid = ibs.get_chip_config_rowid(qreq_=qreq_)
    # Find leaf rowids that need to be computed
    initial_chip_rowid_list = get_annot_chip_rowids_(ibs, aid_list, qreq_=qreq_)
    # Get corresponding "dirty" parent rowids (annots whose chip rowid is None)
    isdirty_list = ut.flag_None_items(initial_chip_rowid_list)
    dirty_aid_list = ut.filter_items(aid_list, isdirty_list)
    num_dirty = len(dirty_aid_list)
    if num_dirty > 0:
        #if ut.VERBOSE:
        print('[add_annot_chips] adding %d / %d new chips' % (len(dirty_aid_list), len(aid_list)))
        # Dependant columns do not need true from_superkey getters.
        # We can use the Tgetter_pl_dependant_rowids_ instead
        get_rowid_from_superkey = functools.partial(
            ibs.get_annot_chip_rowids_, qreq_=qreq_)
        proptup_gen = preproc_chip.generate_chip_properties(ibs, dirty_aid_list)
        dirty_params_iter = (
            (aid, config_rowid, chip_uri, chip_width, chip_height)
            for aid, (chip_uri, chip_width, chip_height,) in
            zip(dirty_aid_list, proptup_gen)
        )
        # NOTE(review): the generator is materialized into a list before the
        # SQL add (unlike the config2_ variant, which streams in chunks)
        dirty_params_iter = list(dirty_params_iter)
        colnames = ['annot_rowid', 'config_rowid',
                    'chip_uri', 'chip_width', 'chip_height']
        #chip_rowid_list = ibs.dbcache.add_cleanly(
        #    const.CHIP_TABLE, colnames, params_iter, get_rowid_from_superkey)
        ibs.dbcache._add(const.CHIP_TABLE, colnames, dirty_params_iter)
        # Now that the dirty params are added get the correct order of rowids
        chip_rowid_list = get_rowid_from_superkey(aid_list)
    else:
        chip_rowid_list = initial_chip_rowid_list
    return chip_rowid_list
Exemple #15
0
def add_chip_feat(ibs, chip_rowid_list, config2_=None, verbose=not ut.QUIET, return_num_dirty=False):
    """ chip.feat.add(chip_rowid_list)

    CRITICAL FUNCTION MUST EXIST FOR ALL DEPENDANTS
    Adds / ensures / computes a dependant property
    Args:
         chip_rowid_list

    Returns:
        returns feat_rowid_list of added (or already existing feats)

    TemplateInfo:
        python -m ibeis.templates.template_generator --key feat --funcname-filter "\<add_chip_feat\>" --modfname=manual_feat_funcs
        Tadder_pl_dependant
        parent = chip
        leaf = feat

    CommandLine:
        python -m ibeis.control.manual_feat_funcs --test-add_chip_feat

    Example0:
        >>> # ENABLE_DOCTEST
        >>> from ibeis.control.manual_feat_funcs import *  # NOQA
        >>> ibs, config2_ = testdata_ibs()
        >>> ibs.get_annot_chip_rowids(ibs.get_valid_aids())  # Ensure chips are computed
        >>> chip_rowid_list = ibs._get_all_chip_rowids()[::3]
        >>> feat_rowid_list = ibs.add_chip_feat(chip_rowid_list, config2_=config2_)
        >>> assert len(feat_rowid_list) == len(chip_rowid_list), 'bad length'
        >>> ut.assert_all_not_None(feat_rowid_list)

    Example1:
        >>> # ENABLE_DOCTEST
        >>> from ibeis.control.manual_feat_funcs import *  # NOQA
        >>> ibs, config2_ = testdata_ibs()
        >>> ibs.get_annot_chip_rowids(ibs.get_valid_aids())  # Ensure chips are computed
        >>> chip_rowid_list = ibs._get_all_chip_rowids()[0:10]
        >>> assert len(chip_rowid_list) == 10, 'chips not computed'
        >>> sub_chip_rowid_list1 = chip_rowid_list[0:6]
        >>> sub_chip_rowid_list2 = chip_rowid_list[5:7]
        >>> sub_chip_rowid_list3 = chip_rowid_list[0:7]
        >>> sub_feat_rowid_list1 = ibs.get_chip_feat_rowid(sub_chip_rowid_list1, config2_=config2_, ensure=True)
        >>> ibs.get_chip_feat_rowid(sub_chip_rowid_list1, config2_=config2_, ensure=True)
        >>> sub_feat_rowid_list1, num_dirty0 = ibs.add_chip_feat(sub_chip_rowid_list1, config2_=config2_, return_num_dirty=True)
        >>> assert num_dirty0 == 0, 'num_dirty0=%r' % (num_dirty0,)
        >>> ut.assert_all_not_None(sub_feat_rowid_list1)
        >>> ibs.delete_chip_feats(sub_chip_rowid_list2)
        >>> #ibs.delete_chip_feat(sub_chip_rowid_list2)?
        >>> sub_feat_rowid_list3 = ibs.get_chip_feat_rowid(sub_chip_rowid_list3, config2_=config2_, ensure=False)
        >>> # Only the last two should be None
        >>> ut.assert_all_not_None(sub_feat_rowid_list3[0:5], 'sub_feat_rowid_list3[0:5])')
        >>> assert sub_feat_rowid_list3[5:7] == [None, None], 'sub_feat_rowid_list3=%r' % (sub_feat_rowid_list3,)
        >>> sub_feat_rowid_list3_ensured, num_dirty1 = ibs.add_chip_feat(sub_chip_rowid_list3, config2_=config2_,  return_num_dirty=True)
        >>> assert num_dirty1 == 2, 'Only two params should have been computed here'
        >>> ut.assert_all_not_None(sub_feat_rowid_list3_ensured)
    """
    # Deferred import (presumably avoids a circular dependency at module load)
    from ibeis.algo.preproc import preproc_feat
    ut.assert_all_not_None(chip_rowid_list, ' chip_rowid_list')
    # Get requested configuration id
    config_rowid = ibs.get_feat_config_rowid(config2_=config2_)
    # Find leaf rowids that need to be computed
    initial_feat_rowid_list = get_chip_feat_rowids_(
        ibs, chip_rowid_list, config2_=config2_)
    # Get corresponding "dirty" parent rowids (chips whose feat rowid is None)
    isdirty_list = ut.flag_None_items(initial_feat_rowid_list)
    dirty_chip_rowid_list = ut.compress(chip_rowid_list, isdirty_list)
    num_dirty = len(dirty_chip_rowid_list)
    num_total = len(chip_rowid_list)
    if num_dirty > 0:
        if verbose:
            fmtstr = '[add_chip_feat] adding %d / %d new feat for config_rowid=%r'
            print(fmtstr % (num_dirty, num_total, config_rowid))
        # Dependant columns do not need true from_superkey getters.
        # We can use the Tgetter_pl_dependant_rowids_ instead
        get_rowid_from_superkey = functools.partial(
            ibs.get_chip_feat_rowids_, config2_=config2_)
        # CALL EXTERNAL PREPROCESSING / GENERATION FUNCTION
        proptup_gen = preproc_feat.generate_feat_properties(
            ibs, dirty_chip_rowid_list, config2_=config2_)
        dirty_params_iter = (
            (chip_rowid, config_rowid, feature_nFeat,
             feature_kpt_arr, feature_vec_arr)
            for chip_rowid, (feature_nFeat, feature_kpt_arr, feature_vec_arr,) in
            zip(dirty_chip_rowid_list, proptup_gen)
        )
        colnames = ['chip_rowid', 'config_rowid',
                    'feature_num_feats', 'feature_keypoints', 'feature_vecs']
        #feat_rowid_list = ibs.dbcache.add_cleanly(const.FEATURE_TABLE, colnames, dirty_params_iter, get_rowid_from_superkey)
        # Insert in chunks so features are committed incrementally
        CHUNKED_ADD = True
        if CHUNKED_ADD:
            chunksize = 128
            print('[add_chip_feat] adding to sql in chunks with chunksize=%r' % (chunksize,))
            for dirty_params_chunk in ut.ichunks(dirty_params_iter, chunksize=chunksize):
                print('[add_chip_feat] adding feature chunk to sql')
                nInput = len(dirty_params_chunk)
                ibs.dbcache._add(
                    const.FEATURE_TABLE, colnames, dirty_params_chunk, nInput=nInput)
        else:
            nInput = num_dirty
            ibs.dbcache._add(
                const.FEATURE_TABLE, colnames, dirty_params_iter, nInput=nInput)

        #ibs.dbcache._add(const.FEATURE_TABLE, colnames, dirty_params_iter)
        # Now that the dirty params are added get the correct order of rowids
        feat_rowid_list = get_rowid_from_superkey(chip_rowid_list)
    else:
        feat_rowid_list = initial_feat_rowid_list
    if return_num_dirty:
        return feat_rowid_list, num_dirty
    return feat_rowid_list
def add_parts(ibs,
              aid_list,
              bbox_list=None,
              theta_list=None,
              detect_confidence_list=None,
              notes_list=None,
              vert_list=None,
              part_uuid_list=None,
              viewpoint_list=None,
              quality_list=None,
              type_list=None,
              staged_uuid_list=None,
              staged_user_id_list=None,
              **kwargs):
    r"""
    Adds an part to annotations

    Args:
        aid_list                 (list): annotation rowids to add part to
        bbox_list                (list): of [x, y, w, h] bounding boxes for each annotation (supply verts instead)
        theta_list               (list): orientations of parts (defaults to 0.0)
        detect_confidence_list   (list): detector confidences (defaults to 0.0)
        notes_list               (list): free-form notes (defaults to '')
        vert_list                (list): alternative to bounding box
        part_uuid_list           (list): external UUIDs; random uuid4s are generated when None
        viewpoint_list           (list): viewpoint values (defaults to -1.0)
        quality_list             (list): accepted for call-site compatibility; not stored here
        type_list                (list): part type labels (defaults to const.UNKNOWN)
        staged_uuid_list         (list): staging UUIDs; a non-None entry marks the part staged
        staged_user_id_list      (list): identity of the staging user

    Returns:
        list: part_rowid_list

    Raises:
        ValueError: if any supplied part uuid is not a ``uuid.UUID`` instance

    RESTful:
        Method: POST
        URL:    /api/part/
    """
    from vtool import geometry

    if ut.VERBOSE:
        logger.info('[ibs] adding parts')
    # Prepare the SQL input
    # Exactly one of bbox_list / vert_list must be given; the other is derived
    assert bool(bbox_list is None) != bool(
        vert_list is None
    ), 'must specify exactly one of bbox_list or vert_list'
    ut.assert_all_not_None(aid_list, 'aid_list')

    if vert_list is None:
        vert_list = geometry.verts_list_from_bboxes_list(bbox_list)
    elif bbox_list is None:
        bbox_list = geometry.bboxes_from_vert_list(vert_list)

    if theta_list is None:
        theta_list = [0.0 for _ in range(len(aid_list))]

    len_bbox = len(bbox_list)
    len_vert = len(vert_list)
    len_aid = len(aid_list)
    len_theta = len(theta_list)
    try:
        assert len_vert == len_bbox, 'bbox and verts are not of same size'
        assert len_aid == len_bbox, 'bbox and aid are not of same size'
        # BUGFIX: message previously read 'bbox and aid' (copy-paste)
        assert len_aid == len_theta, 'theta and aid are not of same size'
    except AssertionError as ex:
        # BUGFIX: a missing comma previously fused 'len_bbox' and 'len_theta'
        # into the single bogus key 'len_bboxlen_theta', dropping len_theta
        # from the diagnostic report
        ut.printex(ex,
                   key_list=['len_vert', 'len_aid', 'len_bbox',
                             'len_theta'])
        raise

    if len(aid_list) == 0:
        # nothing is being added
        logger.info('[ibs] WARNING: 0 parts are being added!')
        logger.info(ut.repr2(locals()))
        return []

    # Fill in per-part defaults for unspecified optional columns
    if detect_confidence_list is None:
        detect_confidence_list = [0.0 for _ in range(len(aid_list))]
    if notes_list is None:
        notes_list = ['' for _ in range(len(aid_list))]
    if viewpoint_list is None:
        viewpoint_list = [-1.0] * len(aid_list)
    if type_list is None:
        type_list = [const.UNKNOWN] * len(aid_list)

    # Verts are serialized to text for storage
    nVert_list = [len(verts) for verts in vert_list]
    vertstr_list = [six.text_type(verts) for verts in vert_list]
    xtl_list, ytl_list, width_list, height_list = list(zip(*bbox_list))
    assert len(nVert_list) == len(vertstr_list)

    # Build random and unique PART ids when none were supplied
    if part_uuid_list is None:
        part_uuid_list = [uuid.uuid4() for _ in range(len(aid_list))]

    if staged_uuid_list is None:
        staged_uuid_list = [None] * len(aid_list)
    # A part is considered staged iff it carries a staged uuid
    is_staged_list = [
        staged_uuid is not None for staged_uuid in staged_uuid_list
    ]
    if staged_user_id_list is None:
        staged_user_id_list = [None] * len(aid_list)

    # Define arguments to insert
    colnames = (
        'part_uuid',
        'annot_rowid',
        'part_xtl',
        'part_ytl',
        'part_width',
        'part_height',
        'part_theta',
        'part_num_verts',
        'part_verts',
        'part_viewpoint',
        'part_detect_confidence',
        'part_note',
        'part_type',
        'part_staged_flag',
        'part_staged_uuid',
        'part_staged_user_identity',
    )

    # Reject malformed UUIDs up front so sql never sees them
    check_uuid_flags = [
        not isinstance(auuid, uuid.UUID) for auuid in part_uuid_list
    ]
    if any(check_uuid_flags):
        pos = ut.list_where(check_uuid_flags)
        raise ValueError('positions %r have malformated UUIDS' % (pos, ))

    params_iter = list(
        zip(
            part_uuid_list,
            aid_list,
            xtl_list,
            ytl_list,
            width_list,
            height_list,
            theta_list,
            nVert_list,
            vertstr_list,
            viewpoint_list,
            detect_confidence_list,
            notes_list,
            type_list,
            is_staged_list,
            staged_uuid_list,
            staged_user_id_list,
        ))

    # Execute add PARTs SQL; part_uuid (column 0) is the dedup superkey
    superkey_paramx = (0, )
    get_rowid_from_superkey = ibs.get_part_rowids_from_uuid
    part_rowid_list = ibs.db.add_cleanly(const.PART_TABLE, colnames,
                                         params_iter, get_rowid_from_superkey,
                                         superkey_paramx)
    return part_rowid_list
Exemple #17
0
def compute_or_read_chip_images(ibs, cid_list, ensure=True, config2_=None):
    """Reads chips and tries to compute them if they do not exist

    Args:
        ibs (IBEISController):
        cid_list (list): chip rowids
        ensure (bool): when False, missing entries come back as None
        config2_: unused; kept for call-site compatibility

    Returns:
        chip_list

    CommandLine:
        python -m ibeis.algo.preproc.preproc_chip --test-compute_or_read_chip_images

    Example:
        >>> # SLOW_DOCTEST
        >>> from ibeis.algo.preproc.preproc_chip import *  # NOQA
        >>> from ibeis.algo.preproc import preproc_chip
        >>> import numpy as np
        >>> ibs, aid_list = testdata_ibeis()
        >>> cid_list = ibs.get_annot_chip_rowids(aid_list, ensure=True)
        >>> chip_list = compute_or_read_chip_images(ibs, cid_list)
        >>> result = np.array(list(map(np.shape, chip_list))).sum(0).tolist()
        >>> print(result)
        [1434, 2274, 12]

    Example:
        >>> # SLOW_DOCTEST
        >>> from ibeis.algo.preproc.preproc_chip import *  # NOQA
        >>> import numpy as np
        >>> ibs, aid_list = testdata_ibeis()
        >>> cid_list = ibs.get_annot_chip_rowids(aid_list, ensure=True)
        >>> # Do a bad thing. Remove from disk without removing from sql
        >>> on_delete(ibs, cid_list)
        >>> # Now compute_or_read_chip_images should catch the bad thing
        >>> # we did and correct for it.
        >>> chip_list = compute_or_read_chip_images(ibs, cid_list)
        >>> result = np.array(list(map(np.shape, chip_list))).sum(0).tolist()
        >>> print(result)
        [1434, 2274, 12]
    """
    cfpath_list = ibs.get_chip_fpath(cid_list)
    try:
        if ensure:
            try:
                ut.assert_all_not_None(cid_list, "cid_list")
            except AssertionError as ex:
                ut.printex(ex, key_list=["cid_list"])
                raise
            chip_list = [vt.imread(fpath) for fpath in cfpath_list]
        else:
            chip_list = [vt.imread(fpath) if fpath is not None else None
                         for fpath in cfpath_list]
    except IOError as ex:
        if not ut.QUIET:
            ut.printex(ex, "[preproc_chip] Handing Exception: ", iswarning=True)
        # Some sql rows point at files missing from disk; purge those annots
        aid_list = ibs.get_chip_aids(cid_list)
        has_row = [cid is not None for cid in cid_list]
        aids_with_rows = ut.compress(aid_list, has_row)
        fpaths_with_rows = ut.compress(cfpath_list, has_row)
        missing_aids = ut.filterfalse_items(aids_with_rows, map(exists, fpaths_with_rows))
        ibs.delete_annot_chips(missing_aids)
        # Recompute the chips and read them back one more time
        new_cid_list = ibs.add_annot_chips(aid_list)
        new_cfpath_list = ibs.get_chip_fpath(new_cid_list)
        chip_list = [vt.imread(fpath) for fpath in new_cfpath_list]
    return chip_list
def add_feat_featweights(ibs, feat_rowid_list, config2_=None, verbose=not ut.QUIET, return_num_dirty=False):
    """ feat.featweight.add(feat_rowid_list)

    CRITICAL FUNCTION MUST EXIST FOR ALL DEPENDANTS
    Adds / ensures / computes a dependant property

    Args:
         feat_rowid_list

    Returns:
        returns featweight_rowid_list of added (or already existing featweights)

    TemplateInfo:
        Tadder_pl_dependant
        parent = feat
        leaf = featweight

    Example0:
        >>> # SLOW_DOCTEST
        >>> from ibeis.control._autogen_featweight_funcs import *  # NOQA
        >>> ibs, config2_ = testdata_ibs()
        >>> from ibeis import constants as const
        >>> aid_list = ibs.get_valid_aids(species=const.TEST_SPECIES.ZEB_PLAIN)[:2]
        >>> if 'annot' != 'feat':
        ...     feat_rowid_list = ibs.get_annot_feat_rowids(aid_list, config2_=config2_, ensure=True)
        >>> featweight_rowid_list = ibs.add_feat_featweights(feat_rowid_list, config2_=config2_)
        >>> assert len(featweight_rowid_list) == len(feat_rowid_list)
        >>> ut.assert_all_not_None(featweight_rowid_list)

    Example1:
        >>> # SLOW_DOCTEST
        >>> from ibeis.control._autogen_featweight_funcs import *  # NOQA
        >>> ibs, config2_ = testdata_ibs('PZ_MTEST')
        >>> from ibeis import constants as const
        >>> aid_list = ibs.get_valid_aids(species=const.TEST_SPECIES.ZEB_PLAIN)[0:7]
        >>> if 'annot' != 'feat':
        ...     feat_rowid_list = ibs.get_annot_feat_rowids(aid_list, config2_=config2_, ensure=True)
        >>> sub_feat_rowid_list1 = feat_rowid_list[0:6]
        >>> sub_feat_rowid_list2 = feat_rowid_list[5:7]
        >>> sub_feat_rowid_list3 = feat_rowid_list[0:7]
        >>> sub_featweight_rowid_list1 = ibs.get_feat_featweight_rowids(sub_feat_rowid_list1, config2_=config2_, ensure=True)
        >>> ibs.get_feat_featweight_rowids(sub_feat_rowid_list1, config2_=config2_, ensure=True)
        >>> sub_featweight_rowid_list1, num_dirty0 = ibs.add_feat_featweights(sub_feat_rowid_list1, config2_=config2_, return_num_dirty=True)
        >>> assert num_dirty0 == 0
        >>> ut.assert_all_not_None(sub_featweight_rowid_list1)
        >>> ibs.delete_feat_featweight(sub_feat_rowid_list2)
        >>> #ibs.delete_feat_featweight(sub_feat_rowid_list2)?
        >>> sub_featweight_rowid_list3 = ibs.get_feat_featweight_rowids(sub_feat_rowid_list3, config2_=config2_, ensure=False)
        >>> # Only the last two should be None
        >>> ut.assert_all_not_None(sub_featweight_rowid_list3[0:5], 'sub_featweight_rowid_list3[0:5])')
        >>> ut.assert_eq(sub_featweight_rowid_list3[5:7], [None, None])
        >>> sub_featweight_rowid_list3_ensured, num_dirty1 = ibs.add_feat_featweights(sub_feat_rowid_list3, config2_=config2_,  return_num_dirty=True)
        >>> ut.assert_eq(num_dirty1, 2, 'Only two params should have been computed here')
        >>> ut.assert_all_not_None(sub_featweight_rowid_list3_ensured)
    """
    # Deferred import (presumably avoids a circular dependency at module load)
    from ibeis.algo.preproc import preproc_featweight
    ut.assert_all_not_None(feat_rowid_list, ' feat_rowid_list')
    # Get requested configuration id
    config_rowid = ibs.get_featweight_config_rowid(config2_=config2_)
    # Find leaf rowids that need to be computed
    initial_featweight_rowid_list = get_feat_featweight_rowids_(
        ibs, feat_rowid_list, config2_=config2_)
    # Get corresponding "dirty" parent rowids (feats whose weight rowid is None)
    isdirty_list = ut.flag_None_items(initial_featweight_rowid_list)
    dirty_feat_rowid_list = ut.compress(feat_rowid_list, isdirty_list)
    num_dirty = len(dirty_feat_rowid_list)
    num_total = len(feat_rowid_list)
    if num_dirty > 0:
        if verbose:
            fmtstr = '[add_feat_featweights] adding %d / %d new featweight for config_rowid=%r'
            print(fmtstr % (num_dirty, num_total, config_rowid))
        # Dependant columns do not need true from_superkey getters.
        # We can use the Tgetter_pl_dependant_rowids_ instead
        get_rowid_from_superkey = functools.partial(
            ibs.get_feat_featweight_rowids_, config2_=config2_)
        proptup_gen = preproc_featweight.generate_featweight_properties(
            ibs, dirty_feat_rowid_list, config2_=config2_)
        dirty_params_iter = (
            (feat_rowid, config_rowid, fgweight)
            for feat_rowid, (fgweight,) in
            zip(dirty_feat_rowid_list, proptup_gen)
        )
        colnames = [
            'feature_rowid', 'config_rowid', 'featweight_forground_weight']
        #featweight_rowid_list = ibs.dbcache.add_cleanly(const.FEATURE_WEIGHT_TABLE, colnames, dirty_params_iter, get_rowid_from_superkey)
        # NOTE(review): unlike add_chip_feat, this insert is not chunked
        ibs.dbcache._add(
            const.FEATURE_WEIGHT_TABLE, colnames, dirty_params_iter)
        # Now that the dirty params are added get the correct order of rowids
        featweight_rowid_list = get_rowid_from_superkey(feat_rowid_list)
    else:
        featweight_rowid_list = initial_featweight_rowid_list
    if return_num_dirty:
        return featweight_rowid_list, num_dirty
    return featweight_rowid_list
Exemple #19
0
def setup_incremental_test(ibs_gt, clear_names=True, aid_order='shuffle'):
    r"""
    Seeds an empty test database from a known ground-truth database.

    Args:
        ibs_gt (IBEISController): ground-truth database to draw annotations from
        clear_names (bool): if True, strip name / exemplar information from the
            test database so the incremental test starts unnamed
        aid_order (str): one of 'shuffle', 'stagger', or 'same'; order in which
            annotations are added (overridable via --aid-order)

    Returns:
        tuple: (ibs2, aid_list1, aid1_to_aid2) - the test controller, the
            ordered source aids, and the source-to-test annotation mapping

    CommandLine:
        python -m ibeis.algo.hots.automated_helpers --test-setup_incremental_test:0

        python dev.py -t custom --cfg codename:vsone_unnorm --db PZ_MTEST --allgt --vf --va
        python dev.py -t custom --cfg codename:vsone_unnorm --db PZ_MTEST --allgt --vf --va --index 0 4 8 --verbose

    Example:
        >>> # DISABLE_DOCTEST
        >>> from ibeis.algo.hots.automated_helpers import *  # NOQA
        >>> import ibeis # NOQA
        >>> ibs_gt = ibeis.opendb('PZ_MTEST')
        >>> ibs2, aid_list1, aid1_to_aid2 = setup_incremental_test(ibs_gt)

    Example:
        >>> # DISABLE_DOCTEST
        >>> from ibeis.algo.hots.automated_helpers import *  # NOQA
        >>> import ibeis  # NOQA
        >>> ibs_gt = ibeis.opendb('GZ_ALL')
        >>> ibs2, aid_list1, aid1_to_aid2 = setup_incremental_test(ibs_gt)
    """
    print('\n\n---- SETUP INCREMENTAL TEST ---\n\n')
    # Take a known database
    # Create an empty database to test in

    ONLY_GT = True
    if ONLY_GT:
        # use only annotations that will have matches in test
        aid_list1_ = ibs_gt.get_aids_with_groundtruth()
    else:
        # use every annotation in test
        aid_list1_ = ibs_gt.get_valid_aids()

    if ut.get_argflag('--gzdev'):
        # Use a custom selection of gzall
        from ibeis.algo.hots import devcases
        assert ibs_gt.get_dbname() == 'GZ_ALL', 'not gzall'
        vuuid_list, ignore_vuuids = devcases.get_gzall_small_test()
        # TODO; include all names of these annots too
        aid_list = ibs_gt.get_annot_aids_from_visual_uuid(vuuid_list)
        ignore_aid_list = ibs_gt.get_annot_aids_from_visual_uuid(ignore_vuuids)
        ignore_nid_list = ibs_gt.get_annot_nids(ignore_aid_list)
        ut.assert_all_not_None(aid_list)
        # Pull in groundtruth matches so each selected name is complete
        other_aids = ut.flatten(ibs_gt.get_annot_groundtruth(aid_list))
        aid_list.extend(other_aids)
        aid_list = sorted(set(aid_list))
        nid_list = ibs_gt.get_annot_nids(aid_list)
        isinvalid_list = [nid in ignore_nid_list for nid in nid_list]
        print('Filtering %r annots specified to ignore' % (sum(isinvalid_list),))
        aid_list = ut.filterfalse_items(aid_list, isinvalid_list)
        aid_list1_ = aid_list

    # Add aids in a random order
    VALID_ORDERS = ['shuffle', 'stagger', 'same']
    aid_order = ut.get_argval('--aid-order', default=aid_order)
    # BUGFIX: list.index raises ValueError for unknown values and never
    # returns -1, so the old `VALID_ORDERS.index(aid_order) > -1` assert
    # could not fail as intended. A membership test fails clearly instead.
    assert aid_order in VALID_ORDERS, 'invalid aid_order=%r' % (aid_order,)

    if aid_order == 'shuffle':
        aid_list1 = ut.deterministic_shuffle(aid_list1_[:])
    elif aid_order == 'stagger':
        # Interleave annotations across names so a name's annotations
        # arrive spread out rather than consecutively
        from six.moves import zip_longest, filter
        aid_groups, unique_nid_list = ibs_gt.group_annots_by_name(aid_list1_)

        def stagger_group(list_):
            return ut.filter_Nones(ut.iflatten(zip_longest(*list_)))

        aid_multiton_group = list(filter(lambda aids: len(aids) > 1, aid_groups))
        aid_list1 = stagger_group(aid_multiton_group)
    elif aid_order == 'same':
        aid_list1 = aid_list1_

    # If reset is true the test database is started completely from scratch
    reset = ut.get_argflag('--reset')

    aid1_to_aid2 = {}  # annotation mapping

    ibs2 = make_incremental_test_database(ibs_gt, aid_list1, reset)

    # Preadd all annotations to the test database
    aids_chunk1 = aid_list1
    aid_list2 = add_annot_chunk(ibs_gt, ibs2, aids_chunk1, aid1_to_aid2)

    # Assert annotation visual uuids are in agreement
    if ut.DEBUG2:
        annot_testdb_consistency_checks(ibs_gt, ibs2, aid_list1, aid_list2)

    # Remove names and exemplar information from test database
    if clear_names:
        ensure_testdb_clean_data(ibs_gt, ibs2, aid_list1, aid_list2)

    # Preprocess features before testing
    ibs2.ensure_annotation_data(aid_list2, featweights=True)

    return ibs2, aid_list1, aid1_to_aid2
Exemple #20
0
def generate_feat_properties(ibs, cid_list, config2_=None, nInput=None):
    r"""
    Computes features and yields results asynchronously: TODO: Remove IBEIS from
    this equation. Move the firewall towards the controller

    Args:
        ibs (IBEISController):
        cid_list (list):
        nInput (None):

    Returns:
        generator : generates param tups

    SeeAlso:
        ~/code/ibeis_cnn/ibeis_cnn/_plugin.py

    CommandLine:
        python -m ibeis.algo.preproc.preproc_feat --test-generate_feat_properties:0 --show
        python -m ibeis.algo.preproc.preproc_feat --test-generate_feat_properties:1

    Example:
        >>> # DISABLE_DOCTEST
        >>> from ibeis.algo.preproc.preproc_feat import *  # NOQA
        >>> import ibeis
        >>> ibs = ibeis.opendb('testdb1')
        >>> config2_ = ibs.new_query_params({})
        >>> nInput = None
        >>> aid_list = ibs.get_valid_aids()[::2]
        >>> ut.assert_all_not_None(aid_list, 'aid_list')
        >>> cid_list = ibs.get_annot_chip_rowids(aid_list, config2_=config2_)
        >>> ut.assert_all_not_None(cid_list, 'cid_list')
        >>> featgen = generate_feat_properties(ibs, cid_list, config2_, nInput)
        >>> feat_list = list(featgen)
        >>> assert len(feat_list) == len(aid_list)
        >>> (nFeat, kpts, vecs) = feat_list[0]
        >>> assert nFeat == len(kpts) and nFeat == len(vecs)
        >>> assert kpts.shape[1] == 6
        >>> assert vecs.shape[1] == 128
        >>> ut.quit_if_noshow()
        >>> import plottool as pt
        >>> chip_fpath = ibs.get_annot_chip_fpath(aid_list[0], config2_=config2_)
        >>> pt.interact_keypoints.ishow_keypoints(chip_fpath, kpts, vecs)
        >>> ut.show_if_requested()

    Example:
        >>> # DISABLE_DOCTEST
        >>> from ibeis.algo.preproc.preproc_feat import *  # NOQA
        >>> import ibeis
        >>> ibs = ibeis.opendb(defaultdb='testdb1')
        >>> cfgdict = {}
        >>> cfgdict['feat_type'] = 'hesaff+siam128'
        >>> qreq_ = ibs.new_query_request([1], [1, 2, 3], cfgdict)
        >>> query_config2 = qreq_.get_external_query_config2()
        >>> data_config2 = qreq_.get_external_data_config2()
        >>> cid_list = ibs.get_annot_chip_rowids(ibs.get_valid_aids())
        >>> config2_ = query_config2
        >>> nInput = None
        >>> featgen = generate_feat_properties(ibs, cid_list, config2_, nInput)
        >>> result = list(featgen)
        >>> print(result)

    Example:
        >>> # DISABLE_DOCTEST
        >>> from ibeis.algo.preproc.preproc_feat import *  # NOQA
        >>> import ibeis
        >>> ibs = ibeis.opendb('PZ_MTEST')
        >>> config2_ = ibs.new_query_params({'affine_invariance': False, 'bgmethod': 'cnn'})
        >>> nInput = None
        >>> aid_list = ibs.get_valid_aids()[0:4]
        >>> ut.assert_all_not_None(aid_list, 'aid_list')
        >>> cid_list = ibs.get_annot_chip_rowids(aid_list, config2_=config2_)
        >>> ut.assert_all_not_None(cid_list, 'cid_list')
        >>> featgen = generate_feat_properties(ibs, cid_list, config2_, nInput)
        >>> feat_list = list(featgen)
        >>> assert len(feat_list) == len(aid_list)
        >>> (nFeat, kpts, vecs) = feat_list[0]
        >>> assert nFeat == len(kpts) and nFeat == len(vecs)
        >>> assert kpts.shape[1] == 6
        >>> assert vecs.shape[1] == 128
        >>> ut.quit_if_noshow()
        >>> import plottool as pt
        >>> chip_fpath = ibs.get_annot_chip_fpath(aid_list[0], config2_=config2_)
        >>> pt.interact_keypoints.ishow_keypoints(chip_fpath, kpts, vecs)
        >>> ut.show_if_requested()

    Ignore:
        # STARTBLOCK
        import plottool as pt
        chip_fpath_list = ibs.get_chip_fpath(cid_list)
        fpath_list = list(ut.interleave((probchip_fpath_list, chip_fpath_list)))
        iteract_obj = pt.interact_multi_image.MultiImageInteraction(fpath_list, nPerPage=4)
        ut.show_if_requested()
        # ENDBLOCK
    """

    # nInput only sizes the downstream extraction / progress reporting
    if nInput is None:
        nInput = len(cid_list)
    if config2_ is not None:
        # Get config from config2_ object
        #print('id(config2_) = ' + str(id(config2_)))
        feat_cfgstr = config2_.get('feat_cfgstr')
        hesaff_params = config2_.get('hesaff_params')
        feat_type = config2_.get('feat_type')
        bgmethod = config2_.get('bgmethod')
        # A config2_ missing these keys is a caller error; fail fast
        assert feat_cfgstr is not None
        assert hesaff_params is not None
    else:
        # TODO: assert False here
        # Get config from IBEIS controller
        bgmethod = ibs.cfg.feat_cfg.bgmethod
        feat_type = ibs.cfg.feat_cfg.feat_type
        feat_cfgstr = ibs.cfg.feat_cfg.get_cfgstr()
        hesaff_params = ibs.cfg.feat_cfg.get_hesaff_params()

    ut.assert_all_not_None(cid_list, 'cid_list')
    chip_fpath_list = ibs.get_chip_fpath(cid_list, check_external_storage=True)

    if bgmethod is not None:
        # Background weighting needs a probability chip per annotation
        aid_list = ibs.get_chip_aids(cid_list)
        probchip_fpath_list = ibs.get_annot_probchip_fpath(aid_list)
    else:
        # Lazy stream of Nones keeps the zip in the extractor aligned
        probchip_fpath_list = (None for _ in range(nInput))

    if ut.NOT_QUIET:
        print('[preproc_feat] feat_cfgstr = %s' % feat_cfgstr)
        if ut.VERYVERBOSE:
            print('hesaff_params = ' + ut.dict_str(hesaff_params))

    # Dispatch on feature type to build the underlying feature generator
    if feat_type == 'hesaff+sift':
        if USE_OPENMP:
            # Use Avi's openmp parallelization
            assert bgmethod is None, 'not implemented'
            featgen_mp = gen_feat_openmp(cid_list, chip_fpath_list,
                                         hesaff_params)
            featgen = ut.ProgressIter(featgen_mp, lbl='openmp feat')
        else:
            # Multiprocessing parallelization
            featgen = extract_hesaff_sift_feats(chip_fpath_list,
                                                probchip_fpath_list,
                                                hesaff_params=hesaff_params,
                                                nInput=nInput,
                                                ordered=True)
    elif feat_type == 'hesaff+siam128':
        from ibeis_cnn import _plugin
        assert bgmethod is None, 'not implemented'
        featgen = _plugin.generate_siam_l2_128_feats(ibs,
                                                     cid_list,
                                                     config2_=config2_)
    else:
        raise AssertionError('unknown feat_type=%r' % (feat_type, ))

    # Re-yield each result as an explicit (nFeat, kpts, vecs) param tuple
    for nFeat, kpts, vecs in featgen:
        yield (
            nFeat,
            kpts,
            vecs,
        )
Exemple #21
0
def setup_incremental_test(ibs_gt, clear_names=True, aid_order='shuffle'):
    r"""
    Seeds an empty test database from a known ground-truth database.

    Args:
        ibs_gt (IBEISController): ground-truth database to draw annotations from
        clear_names (bool): if True, strip name / exemplar information from the
            test database so the incremental test starts unnamed
        aid_order (str): one of 'shuffle', 'stagger', or 'same'; order in which
            annotations are added (overridable via --aid-order)

    Returns:
        tuple: (ibs2, aid_list1, aid1_to_aid2) - the test controller, the
            ordered source aids, and the source-to-test annotation mapping

    CommandLine:
        python -m ibeis.algo.hots.automated_helpers --test-setup_incremental_test:0

        python dev.py -t custom --cfg codename:vsone_unnorm --db PZ_MTEST --allgt --vf --va
        python dev.py -t custom --cfg codename:vsone_unnorm --db PZ_MTEST --allgt --vf --va --index 0 4 8 --verbose

    Example:
        >>> # DISABLE_DOCTEST
        >>> from ibeis.algo.hots.automated_helpers import *  # NOQA
        >>> import ibeis # NOQA
        >>> ibs_gt = ibeis.opendb('PZ_MTEST')
        >>> ibs2, aid_list1, aid1_to_aid2 = setup_incremental_test(ibs_gt)

    Example:
        >>> # DISABLE_DOCTEST
        >>> from ibeis.algo.hots.automated_helpers import *  # NOQA
        >>> import ibeis  # NOQA
        >>> ibs_gt = ibeis.opendb('GZ_ALL')
        >>> ibs2, aid_list1, aid1_to_aid2 = setup_incremental_test(ibs_gt)
    """
    print('\n\n---- SETUP INCREMENTAL TEST ---\n\n')
    # Take a known database
    # Create an empty database to test in

    ONLY_GT = True
    if ONLY_GT:
        # use only annotations that will have matches in test
        aid_list1_ = ibs_gt.get_aids_with_groundtruth()
    else:
        # use every annotation in test
        aid_list1_ = ibs_gt.get_valid_aids()

    if ut.get_argflag('--gzdev'):
        # Use a custom selection of gzall
        from ibeis.algo.hots import devcases
        assert ibs_gt.get_dbname() == 'GZ_ALL', 'not gzall'
        vuuid_list, ignore_vuuids = devcases.get_gzall_small_test()
        # TODO; include all names of these annots too
        aid_list = ibs_gt.get_annot_aids_from_visual_uuid(vuuid_list)
        ignore_aid_list = ibs_gt.get_annot_aids_from_visual_uuid(ignore_vuuids)
        ignore_nid_list = ibs_gt.get_annot_nids(ignore_aid_list)
        ut.assert_all_not_None(aid_list)
        # Pull in groundtruth matches so each selected name is complete
        other_aids = ut.flatten(ibs_gt.get_annot_groundtruth(aid_list))
        aid_list.extend(other_aids)
        aid_list = sorted(set(aid_list))
        nid_list = ibs_gt.get_annot_nids(aid_list)
        isinvalid_list = [nid in ignore_nid_list for nid in nid_list]
        print('Filtering %r annots specified to ignore' %
              (sum(isinvalid_list), ))
        aid_list = ut.filterfalse_items(aid_list, isinvalid_list)
        aid_list1_ = aid_list

    # Add aids in a random order
    VALID_ORDERS = ['shuffle', 'stagger', 'same']
    aid_order = ut.get_argval('--aid-order', default=aid_order)
    # BUGFIX: list.index raises ValueError for unknown values and never
    # returns -1, so the old `VALID_ORDERS.index(aid_order) > -1` assert
    # could not fail as intended. A membership test fails clearly instead.
    assert aid_order in VALID_ORDERS, 'invalid aid_order=%r' % (aid_order,)

    if aid_order == 'shuffle':
        aid_list1 = ut.deterministic_shuffle(aid_list1_[:])
    elif aid_order == 'stagger':
        # Interleave annotations across names so a name's annotations
        # arrive spread out rather than consecutively
        from six.moves import zip_longest, filter
        aid_groups, unique_nid_list = ibs_gt.group_annots_by_name(aid_list1_)

        def stagger_group(list_):
            return ut.filter_Nones(ut.iflatten(zip_longest(*list_)))

        aid_multiton_group = list(
            filter(lambda aids: len(aids) > 1, aid_groups))
        aid_list1 = stagger_group(aid_multiton_group)
    elif aid_order == 'same':
        aid_list1 = aid_list1_

    # If reset is true the test database is started completely from scratch
    reset = ut.get_argflag('--reset')

    aid1_to_aid2 = {}  # annotation mapping

    ibs2 = make_incremental_test_database(ibs_gt, aid_list1, reset)

    # Preadd all annotations to the test database
    aids_chunk1 = aid_list1
    aid_list2 = add_annot_chunk(ibs_gt, ibs2, aids_chunk1, aid1_to_aid2)

    # Assert annotation visual uuids are in agreement
    if ut.DEBUG2:
        annot_testdb_consistency_checks(ibs_gt, ibs2, aid_list1, aid_list2)

    # Remove names and exemplar information from test database
    if clear_names:
        ensure_testdb_clean_data(ibs_gt, ibs2, aid_list1, aid_list2)

    # Preprocess features before testing
    ibs2.ensure_annotation_data(aid_list2, featweights=True)

    return ibs2, aid_list1, aid1_to_aid2
Exemple #22
0
def compute_or_read_chip_images(ibs, cid_list, ensure=True, config2_=None):
    """Load chip images from disk, repairing and recomputing missing ones.

    If any chip file listed in sql is missing from disk (IOError on read),
    the stale chip rows are deleted, the chips are re-added, and all images
    are re-read in the original order.

    Args:
        ibs (IBEISController): controller holding the chip table
        cid_list (list): chip rowids to read
        ensure (bool): when True, assert every rowid is valid and read every
            chip; when False, a None rowid maps to a None chip
        config2_ : accepted for call compatibility; not referenced here

    Returns:
        list: chip images aligned with cid_list

    CommandLine:
        python -m ibeis.algo.preproc.preproc_chip --test-compute_or_read_chip_images

    Example:
        >>> # SLOW_DOCTEST
        >>> from ibeis.algo.preproc.preproc_chip import *  # NOQA
        >>> from ibeis.algo.preproc import preproc_chip
        >>> import numpy as np
        >>> ibs, aid_list = testdata_ibeis()
        >>> cid_list = ibs.get_annot_chip_rowids(aid_list, ensure=True)
        >>> chip_list = compute_or_read_chip_images(ibs, cid_list)
        >>> result = np.array(list(map(np.shape, chip_list))).sum(0).tolist()
        >>> print(result)
        [1434, 2274, 12]

    Example:
        >>> # SLOW_DOCTEST
        >>> from ibeis.algo.preproc.preproc_chip import *  # NOQA
        >>> import numpy as np
        >>> ibs, aid_list = testdata_ibeis()
        >>> cid_list = ibs.get_annot_chip_rowids(aid_list, ensure=True)
        >>> # Do a bad thing. Remove from disk without removing from sql
        >>> on_delete(ibs, cid_list)
        >>> # Now compute_or_read_chip_images should catch the bad thing
        >>> # we did and correct for it.
        >>> chip_list = compute_or_read_chip_images(ibs, cid_list)
        >>> result = np.array(list(map(np.shape, chip_list))).sum(0).tolist()
        >>> print(result)
        [1434, 2274, 12]
    """
    cfpath_list = ibs.get_chip_fpath(cid_list)
    try:
        if not ensure:
            # Best-effort read: a None rowid yields a None chip
            chip_list = [
                None if fpath is None else vt.imread(fpath)
                for fpath in cfpath_list
            ]
        else:
            try:
                ut.assert_all_not_None(cid_list, 'cid_list')
            except AssertionError as ex:
                ut.printex(ex, key_list=['cid_list'])
                raise
            else:
                chip_list = [vt.imread(fpath) for fpath in cfpath_list]
    except IOError as ex:
        if not ut.QUIET:
            ut.printex(ex,
                       '[preproc_chip] Handing Exception: ',
                       iswarning=True)
        # A chip file was removed from disk without updating sql.
        # Purge the stale annotation-chip rows ...
        aid_list = ibs.get_chip_aids(cid_list)
        valid_flags = [cid is not None for cid in cid_list]
        valid_aids = ut.compress(aid_list, valid_flags)
        valid_cfpaths = ut.compress(cfpath_list, valid_flags)
        invalid_aids = ut.filterfalse_items(valid_aids,
                                            map(exists, valid_cfpaths))
        ibs.delete_annot_chips(invalid_aids)
        # ... then re-add every chip and re-read in the original order
        new_cid_list = ibs.add_annot_chips(aid_list)
        cfpath_list = ibs.get_chip_fpath(new_cid_list)
        chip_list = [vt.imread(fpath) for fpath in cfpath_list]
    return chip_list