Example 1
import numpy as np
import utool as ut

def sanity_checks(offset_list, Y_list, query_annots, ibs):
    # offsets delimit each annotation's feature block; diff gives the counts
    nfeat_list = np.diff(offset_list)
    for Y, nfeat in ut.ProgIter(zip(Y_list, nfeat_list), 'checking'):
        assert nfeat == sum(ut.lmap(len, Y.fxs_list))

    if False:
        # Visualize queries
        # Look at the standard query images here
        # http://www.robots.ox.ac.uk/~vgg/publications/2007/Philbin07/philbin07.pdf
        from wbia.viz import viz_chip
        import wbia.plottool as pt

        pt.qt4ensure()
        fnum = 1
        pnum_ = pt.make_pnum_nextgen(len(query_annots.aids) // 5, 5)
        for aid in ut.ProgIter(query_annots.aids):
            pnum = pnum_()
            viz_chip.show_chip(
                ibs,
                aid,
                in_image=True,
                annote=False,
                notitle=True,
                draw_lbls=False,
                fnum=fnum,
                pnum=pnum,
            )
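
# A minimal, self-contained sketch of the offset bookkeeping checked above.
# The arrays below are invented for illustration.
import numpy as np

fxs_list = [[0, 1, 2], [3, 4], [5, 6, 7, 8]]  # hypothetical per-annot features
offset_list = np.cumsum([0] + [len(fxs) for fxs in fxs_list])  # [0, 3, 5, 9]

# np.diff recovers the per-annot feature counts from the offsets
nfeat_list = np.diff(offset_list)
assert all(n == len(fxs) for n, fxs in zip(nfeat_list, fxs_list))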
Example 2
def plot_gps_html(gps_list):
    """ Plots gps coordinates on a map projection

    InstallBasemap:
        sudo apt-get install libgeos-dev
        pip install git+https://github.com/matplotlib/basemap
        http://matplotlib.org/basemap/users/examples.html

        pip install gmplot

        sudo apt-get install netcdf-bin
        sudo apt-get install libnetcdf-dev
        pip install netCDF4

    Ignore:
        pip install git+git://github.com/myuser/foo.git@v123

    Example:
        >>> # DISABLE_DOCTEST
        >>> from wbia.algo.preproc.preproc_occurrence import *  # NOQA
        >>> import numpy as np
        >>> import wbia
        >>> ibs = wbia.opendb(defaultdb='testdb1')
        >>> images = ibs.images()
        >>> # Setup GPS points to draw
        >>> print('Setup GPS points')
        >>> gps_list_ = np.array(images.gps2)
        >>> unixtime_list_ = np.array(images.unixtime2)
        >>> has_gps = np.all(np.logical_not(np.isnan(gps_list_)), axis=1)
        >>> has_unixtime = np.logical_not(np.isnan(unixtime_list_))
        >>> isvalid = np.logical_and(has_gps, has_unixtime)
        >>> gps_list = gps_list_.compress(isvalid, axis=0)
        >>> unixtime_list = unixtime_list_.compress(isvalid)  # NOQA
        >>> plot_gps_html(gps_list)
    """
    import wbia.plottool as pt
    import gmplot
    import matplotlib as mpl
    import utool as ut
    import vtool as vt

    pt.qt4ensure()

    lat = gps_list.T[0]
    lon = gps_list.T[1]

    # Get extent of the GPS points
    bbox = vt.bbox_from_verts(gps_list)
    centerx, centery = vt.bbox_center(bbox)

    gmap = gmplot.GoogleMapPlotter(centerx, centery, 13)
    color = mpl.colors.rgb2hex(pt.ORANGE)
    gmap.scatter(lat, lon, color=color, size=100, marker=False)
    gmap.draw('mymap.html')
    ut.startfile('mymap.html')
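
# For reference, a stand-alone gmplot sketch of the same scatter-and-draw
# pattern. The coordinates are invented, and newer gmplot releases may need
# a Google Maps API key for the tiles to render.
import gmplot

lats = [37.428, 37.430, 37.429]        # hypothetical latitudes
lngs = [-122.145, -122.144, -122.146]  # hypothetical longitudes

# center the map on the first point at zoom level 13
gmap = gmplot.GoogleMapPlotter(lats[0], lngs[0], 13)
gmap.scatter(lats, lngs, color='#FF8800', size=100, marker=False)
gmap.draw('mymap.html')  # writes a self-contained HTML map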
Example 3
import logging
import utool as ut

logger = logging.getLogger(__name__)

def fix_splits_interaction(ibs):
    """
    python -m wbia fix_splits_interaction --show

    Example:
        >>> # DISABLE_DOCTEST GGR
        >>> from wbia.other.dbinfo import *  # NOQA
        >>> import wbia
        >>> dbdir = '/media/danger/GGR/GGR-IBEIS'
        >>> dbdir = dbdir if ut.checkpath(dbdir) else ut.truepath('~/lev/media/danger/GGR/GGR-IBEIS')
        >>> ibs = wbia.opendb(dbdir=dbdir, allow_newdir=False)
        >>> import wbia.guitool as gt
        >>> gt.ensure_qtapp()
        >>> win = fix_splits_interaction(ibs)
        >>> ut.quit_if_noshow()
        >>> import wbia.plottool as pt
        >>> gt.qtapp_loop(qwin=win)
    """
    split_props = {'splitcase', 'photobomb'}
    all_annot_groups = ibs._annot_groups(
        ibs.group_annots_by_name(ibs.get_valid_aids())[0])
    all_has_split = [
        len(split_props.intersection(ut.flatten(tags))) > 0
        for tags in all_annot_groups.match_tags
    ]
    tosplit_annots = ut.compress(all_annot_groups.annots_list, all_has_split)

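    # review the largest groups first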
    tosplit_annots = ut.take(tosplit_annots,
                             ut.argsort(ut.lmap(len, tosplit_annots)))[::-1]
    if ut.get_argflag('--reverse'):
        tosplit_annots = tosplit_annots[::-1]
    logger.info('len(tosplit_annots) = %r' % (len(tosplit_annots), ))
    aids_list = [a.aids for a in tosplit_annots]

    from wbia.algo.graph import graph_iden
    from wbia.viz import viz_graph2
    import wbia.guitool as gt
    import wbia.plottool as pt

    pt.qt4ensure()
    gt.ensure_qtapp()

    for aids in ut.InteractiveIter(aids_list):
        infr = graph_iden.AnnotInference(ibs, aids)
        infr.initialize_graph()
        win = viz_graph2.AnnotGraphWidget(infr=infr,
                                          use_image=False,
                                          init_mode='rereview')
        win.populate_edge_model()
        win.show()
    return win
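
# A plain-Python sketch of the tag test used above: a name group is flagged
# for splitting when any of its pairwise match tags carries a split property.
# The tag data below is invented.
from itertools import chain

split_props = {'splitcase', 'photobomb'}
match_tags = [
    [['photobomb'], []],         # group 0: needs a split review
    [[], ['scenerymatch'], []],  # group 1: fine
]
has_split = [bool(split_props.intersection(chain.from_iterable(tags)))
             for tags in match_tags]
assert has_split == [True, False]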
Example 4
import logging

logger = logging.getLogger(__name__)

def gridsearch_ratio_thresh(matches):
    import numpy as np
    import sklearn
    import sklearn.metrics
    import sklearn.model_selection  # needed for StratifiedKFold below
    import utool as ut
    import vtool as vt

    # Param search for vsone
    import wbia.plottool as pt

    pt.qt4ensure()

    # note: recent sklearn versions require shuffle=True when random_state is set
    skf = sklearn.model_selection.StratifiedKFold(n_splits=10, random_state=119372)

    y = np.array([m.annot1['nid'] == m.annot2['nid'] for m in matches])

    basis = {'ratio_thresh': np.linspace(0.6, 0.7, 50).tolist()}
    grid = ut.all_dict_combinations(basis)
    xdata = np.array(ut.take_column(grid, 'ratio_thresh'))

    def _ratio_thresh(y_true, match_list):
        # Try to find the optimal ratio threshold
        auc_list = []
        for cfgdict in ut.ProgIter(grid, lbl='gridsearch'):
            y_score = [
                match.fs.compress(match.ratio_test_flags(cfgdict)).sum()
                for match in match_list
            ]
            auc = sklearn.metrics.roc_auc_score(y_true, y_score)
            auc_list.append(auc)
        auc_list = np.array(auc_list)
        return auc_list

    auc_list = _ratio_thresh(y, matches)
    pt.plot(xdata, auc_list)
    subx, suby = vt.argsubmaxima(auc_list, xdata)
    best_ratio_thresh = subx[suby.argmax()]

    skf_results = []
    y_true = y
    for train_idx, test_idx in skf.split(matches, y):
        match_list_ = ut.take(matches, train_idx)
        y_true = y.take(train_idx)
        auc_list = _ratio_thresh(y_true, match_list_)
        subx, suby = vt.argsubmaxima(auc_list, xdata, maxima_thresh=0.8)
        best_ratio_thresh = subx[suby.argmax()]
        skf_results.append(best_ratio_thresh)
    logger.info('mean(skf_results) = %r' % (np.mean(skf_results),))
    import utool

    utool.embed()  # drop into an interactive shell for manual inspection
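
# A self-contained sketch of the underlying pattern: sweep a threshold grid,
# score each candidate with ROC AUC, and keep the best. Labels and scores
# here are synthetic.
import numpy as np
import sklearn.metrics

rng = np.random.RandomState(0)
y_true = rng.rand(200) > 0.5
raw_scores = rng.rand(200) + 0.5 * y_true  # synthetic match scores

xdata = np.linspace(0.6, 0.7, 50)
auc_list = np.array([
    sklearn.metrics.roc_auc_score(y_true, raw_scores * (raw_scores > thresh))
    for thresh in xdata
])
best_thresh = xdata[auc_list.argmax()]
print('best_thresh = %r' % (best_thresh,))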
Example 5
def show_data_image(data_uri_order, i, offset_list, all_kpts, all_vecs):
    """
    i = 12
    """
    import utool as ut
    import vtool as vt
    from os.path import join

    imgdir = ut.truepath('/raid/work/Oxford/oxbuild_images')
    gpath = join(imgdir, data_uri_order[i] + '.jpg')
    image = vt.imread(gpath)
    import wbia.plottool as pt

    pt.qt4ensure()
    # pt.imshow(image)
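    # offsets delimit this image's rows in the stacked kpts/vecs arrays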
    left = offset_list[i]
    right = offset_list[i + 1]
    kpts = all_kpts[left:right]
    vecs = all_vecs[left:right]
    pt.interact_keypoints.ishow_keypoints(image,
                                          kpts,
                                          vecs,
                                          ori=False,
                                          ell_alpha=0.4,
                                          color='distinct')
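
# The offset-slicing idiom above in isolation (arrays invented):
import numpy as np

all_kpts = np.arange(18).reshape(9, 2)  # 9 stacked keypoint rows
offset_list = np.array([0, 3, 5, 9])    # 3 images -> 4 offsets
i = 1
left, right = offset_list[i], offset_list[i + 1]
kpts = all_kpts[left:right]             # rows 3..4 belong to image 1
assert kpts.shape == (2, 2)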
Example 6
import logging
import utool as ut

logger = logging.getLogger(__name__)

def dans_splits(ibs):
    """
    python -m wbia dans_splits --show

    Example:
        >>> # DISABLE_DOCTEST GGR
        >>> from wbia.other.dbinfo import *  # NOQA
        >>> import wbia
        >>> dbdir = '/media/danger/GGR/GGR-IBEIS'
        >>> dbdir = dbdir if ut.checkpath(dbdir) else ut.truepath('~/lev/media/danger/GGR/GGR-IBEIS')
        >>> ibs = wbia.opendb(dbdir=dbdir, allow_newdir=False)
        >>> import wbia.guitool as gt
        >>> gt.ensure_qtapp()
        >>> win = dans_splits(ibs)
        >>> ut.quit_if_noshow()
        >>> import wbia.plottool as pt
        >>> gt.qtapp_loop(qwin=win)
    """
    # pair = 9262, 932

    dans_aids = [
        26548, 2190, 9418, 29965, 14738, 26600, 3039, 2742, 8249, 20154,
        8572, 4504, 34941, 4040, 7436, 31866, 28291, 16009, 7378, 14453,
        2590, 2738, 22442, 26483, 21640, 19003, 13630, 25395, 20015, 14948,
        21429, 19740, 7908, 23583, 14301, 26912, 30613, 19719, 21887, 8838,
        16184, 9181, 8649, 8276, 14678, 21950, 4925, 13766, 12673, 8417,
        2018, 22434, 21149, 14884, 5596, 8276, 14650, 1355, 21725, 21889,
        26376, 2867, 6906, 4890, 21524, 6690, 14738, 1823, 35525, 9045,
        31723, 2406, 5298, 15627, 31933, 19535, 9137, 21002, 2448, 32454,
        12615, 31755, 20015, 24573, 32001, 23637, 3192, 3197, 8702, 1240,
        5596, 33473, 23874, 9558, 9245, 23570, 33075, 23721, 24012, 33405,
        23791, 19498, 33149, 9558, 4971, 34183, 24853, 9321, 23691, 9723,
        9236, 9723, 21078, 32300, 8700, 15334, 6050, 23277, 31164, 14103,
        21231, 8007, 10388, 33387, 4319, 26880, 8007, 31164, 32300, 32140,
    ]

    is_hybrid = [7123, 7166, 7157, 7158]  # NOQA
    needs_mask = [26836, 29742]  # NOQA
    justfine = [19862]  # NOQA

    annots = ibs.annots(dans_aids)
    unique_nids = ut.unique(annots.nids)
    grouped_aids = ibs.get_name_aids(unique_nids)
    annot_groups = ibs._annot_groups(grouped_aids)

    split_props = {'splitcase', 'photobomb'}
    needs_tag = [
        len(split_props.intersection(ut.flatten(tags))) == 0
        for tags in annot_groups.match_tags
    ]
    num_needs_tag = sum(needs_tag)
    num_had_split = len(needs_tag) - num_needs_tag
    logger.info('num_had_split = %r' % (num_had_split, ))
    logger.info('num_needs_tag = %r' % (num_needs_tag, ))

    # all_annot_groups = ibs._annot_groups(ibs.group_annots_by_name(ibs.get_valid_aids())[0])
    # all_has_split = [len(split_props.intersection(ut.flatten(tags))) > 0 for tags in all_annot_groups.match_tags]
    # num_nondan = sum(all_has_split) - num_had_split
    # logger.info('num_nondan = %r' % (num_nondan,))

    from wbia.algo.graph import graph_iden
    from wbia.viz import viz_graph2
    import wbia.guitool as gt
    import wbia.plottool as pt

    pt.qt4ensure()
    gt.ensure_qtapp()

    aids_list = ut.compress(grouped_aids, needs_tag)
    aids_list = [a for a in aids_list if len(a) > 1]
    logger.info('len(aids_list) = %r' % (len(aids_list), ))

    for aids in aids_list:
        infr = graph_iden.AnnotInference(ibs, aids)
        infr.initialize_graph()
        win = viz_graph2.AnnotGraphWidget(infr=infr,
                                          use_image=False,
                                          init_mode='rereview')
        win.populate_edge_model()
        win.show()
        # only the first group is reviewed per invocation
        return win
    assert False, 'no name groups needed tagging'
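
# A plain-Python sketch of the needs_tag filter: keep only the name groups
# whose match tags do not yet mention a split property, and that contain more
# than one annotation. The data below is invented.
from itertools import chain

split_props = {'splitcase', 'photobomb'}
grouped_aids = [[1, 2, 3], [4, 5], [6]]
match_tags = [[['splitcase']], [[], []], [[]]]

needs_tag = [not split_props.intersection(chain.from_iterable(tags))
             for tags in match_tags]
todo = [aids for aids, flag in zip(grouped_aids, needs_tag)
        if flag and len(aids) > 1]
assert todo == [[4, 5]]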
Example 7
import logging
import utool as ut

logger = logging.getLogger(__name__)

def get_injured_sharks():
    """
    >>> from wbia.scripts.getshark import *  # NOQA
    """
    import requests

    url = 'http://www.whaleshark.org/getKeywordImages.jsp'
    resp = requests.get(url)
    assert resp.status_code == 200
    keywords = resp.json()['keywords']
    key_list = ut.take_column(keywords, 'indexName')
    key_to_nice = {k['indexName']: k['readableName'] for k in keywords}

    injury_patterns = [
        'injury',
        'net',
        'hook',
        'trunc',
        'damage',
        'scar',
        'nicks',
        'bite',
    ]

    injury_keys = [
        key for key in key_list if any([pat in key for pat in injury_patterns])
    ]
    noninjury_keys = ut.setdiff(key_list, injury_keys)
    injury_nice = ut.lmap(lambda k: key_to_nice[k], injury_keys)  # NOQA
    noninjury_nice = ut.lmap(lambda k: key_to_nice[k], noninjury_keys)  # NOQA
    key_list = injury_keys

    keyed_images = {}
    for key in ut.ProgIter(key_list, lbl='reading index', bs=True):
        key_url = url + '?indexName={indexName}'.format(indexName=key)
        key_resp = requests.get(key_url)
        assert key_resp.status_code == 200
        key_imgs = key_resp.json()['images']
        keyed_images[key] = key_imgs

    key_hist = {key: len(imgs) for key, imgs in keyed_images.items()}
    key_hist = ut.sort_dict(key_hist, 'vals')
    logger.info(ut.repr3(key_hist))
    nice_key_hist = ut.map_dict_keys(lambda k: key_to_nice[k], key_hist)
    nice_key_hist = ut.sort_dict(nice_key_hist, 'vals')
    logger.info(ut.repr3(nice_key_hist))

    key_to_urls = {
        key: ut.take_column(vals, 'url')
        for key, vals in keyed_images.items()
    }
    overlaps = {}
    import itertools

    overlap_img_list = []
    for k1, k2 in itertools.combinations(key_to_urls.keys(), 2):
        overlap_imgs = ut.isect(key_to_urls[k1], key_to_urls[k2])
        num_overlap = len(overlap_imgs)
        overlaps[(k1, k2)] = num_overlap
        overlaps[(k1, k1)] = len(key_to_urls[k1])
        if num_overlap > 0:
            # logger.info('[%s][%s], overlap=%r' % (k1, k2, num_overlap))
            overlap_img_list.extend(overlap_imgs)

    all_img_urls = list(set(ut.flatten(key_to_urls.values())))
    num_all = len(all_img_urls)  # NOQA
    logger.info('num_all = %r' % (num_all, ))

    # Determine super-categories
    categories = ['nicks', 'scar', 'trunc']

    # Force these keys into these categories
    key_to_cat = {'scarbite': 'other_injury'}

    cat_to_keys = ut.ddict(list)

    for key in key_to_urls.keys():
        if key in key_to_cat:
            cat = key_to_cat[key]
            cat_to_keys[cat].append(key)
            continue
        matched = False
        for cat in categories:
            if cat in key:
                cat_to_keys[cat].append(key)
                matched = True
        if not matched:
            cat_to_keys['other_injury'].append(key)

    cat_urls = ut.ddict(list)
    for cat, keys in cat_to_keys.items():
        for key in keys:
            cat_urls[cat].extend(key_to_urls[key])

    cat_hist = {}
    for cat in list(cat_urls.keys()):
        cat_urls[cat] = list(set(cat_urls[cat]))
        cat_hist[cat] = len(cat_urls[cat])

    logger.info(ut.repr3(cat_to_keys))
    logger.info(ut.repr3(cat_hist))

    # invert cat_to_keys: map each keyword back to its category
    key_to_cat = {val: key for key, vals in cat_to_keys.items() for val in vals}

    # ingestset = {
    #    '__class__': 'ImageSet',
    #    'images': ut.ddict(dict)
    # }
    # for key, key_imgs in keyed_images.items():
    #    for imgdict in key_imgs:
    #        url = imgdict['url']
    #        encid = imgdict['correspondingEncounterNumber']
    #        # Make structure
    #        encdict = encounters[encid]
    #        encdict['__class__'] = 'Encounter'
    #        imgdict = ut.delete_keys(imgdict.copy(), ['correspondingEncounterNumber'])
    #        imgdict['__class__'] = 'Image'
    #        cat = key_to_cat[key]
    #        annotdict = {'relative_bbox': [.01, .01, .98, .98], 'tags': [cat, key]}
    #        annotdict['__class__'] = 'Annotation'

    #        # Ensure structures exist
    #        encdict['images'] = encdict.get('images', [])
    #        imgdict['annots'] = imgdict.get('annots', [])

    #        # Add an image to this encounter
    #        encdict['images'].append(imgdict)
    #        # Add an annotation to this image
    #        imgdict['annots'].append(annotdict)

    # # http://springbreak.wildbook.org/rest/org.ecocean.Encounter/1111
    # get_enc_url = 'http://www.whaleshark.org/rest/org.ecocean.Encounter/%s' % (encid,)
    # resp = requests.get(get_enc_url)
    # logger.info(ut.repr3(encdict))
    # logger.info(ut.repr3(encounters))

    # Download the files to the local disk
    # fpath_list =

    all_urls = ut.unique(
        ut.take_column(
            ut.flatten(
                ut.dict_subset(keyed_images,
                               ut.flatten(cat_to_keys.values())).values()),
            'url',
        ))

    dldir = ut.truepath('~/tmpsharks')
    from os.path import commonprefix, basename  # NOQA

    prefix = commonprefix(all_urls)
    suffix_list = [url_[len(prefix):] for url_ in all_urls]
    fname_list = [suffix.replace('/', '--') for suffix in suffix_list]

    fpath_list = []
    for url, fname in ut.ProgIter(zip(all_urls, fname_list),
                                  lbl='downloading imgs',
                                  freq=1):
        fpath = ut.grab_file_url(url,
                                 download_dir=dldir,
                                 fname=fname,
                                 verbose=False)
        fpath_list.append(fpath)

    # Make sure we keep orig info
    # url_to_keys = ut.ddict(list)
    url_to_info = ut.ddict(dict)
    for key, imgdict_list in keyed_images.items():
        for imgdict in imgdict_list:
            url = imgdict['url']
            info = url_to_info[url]
            for k, v in imgdict.items():
                info[k] = info.get(k, [])
                info[k].append(v)
            info['keys'] = info.get('keys', [])
            info['keys'].append(key)
            # url_to_keys[url].append(key)

    info_list = ut.take(url_to_info, all_urls)
    for info in info_list:
        if len(set(info['correspondingEncounterNumber'])) > 1:
            assert False, 'url with two different encounter nums'
    # Combine duplicate tags

    hashid_list = [
        ut.get_file_uuid(fpath_, stride=8)
        for fpath_ in ut.ProgIter(fpath_list, bs=True)
    ]
    groupxs = ut.group_indices(hashid_list)[1]

    # Group properties by duplicate images
    # groupxs = [g for g in groupxs if len(g) > 1]
    fpath_list_ = ut.take_column(ut.apply_grouping(fpath_list, groupxs), 0)
    url_list_ = ut.take_column(ut.apply_grouping(all_urls, groupxs), 0)
    info_list_ = [
        ut.map_dict_vals(ut.flatten, ut.dict_accum(*info_))
        for info_ in ut.apply_grouping(info_list, groupxs)
    ]

    encid_list_ = [
        ut.unique(info_['correspondingEncounterNumber'])[0]
        for info_ in info_list_
    ]
    keys_list_ = [ut.unique(info_['keys']) for info_ in info_list_]
    cats_list_ = [ut.unique(ut.take(key_to_cat, keys)) for keys in keys_list_]

    clist = ut.ColumnLists({
        'gpath': fpath_list_,
        'url': url_list_,
        'encid': encid_list_,
        'key': keys_list_,
        'cat': cats_list_,
    })

    # for info_ in ut.apply_grouping(info_list, groupxs):
    #    info = ut.dict_accum(*info_)
    #    info = ut.map_dict_vals(ut.flatten, info)
    #    x = ut.unique(ut.flatten(ut.dict_accum(*info_)['correspondingEncounterNumber']))
    #    if len(x) > 1:
    #        info = info.copy()
    #        del info['keys']
    #        logger.info(ut.repr3(info))

    flags = ut.lmap(ut.fpath_has_imgext, clist['gpath'])
    clist = clist.compress(flags)

    import wbia

    ibs = wbia.opendb('WS_Injury', allow_newdir=True)

    gid_list = ibs.add_images(clist['gpath'])
    clist['gid'] = gid_list

    failed_flags = ut.flag_None_items(clist['gid'])
    logger.info('# failed %s' % (sum(failed_flags), ))
    passed_flags = ut.not_list(failed_flags)
    clist = clist.compress(passed_flags)
    ut.assert_all_not_None(clist['gid'])
    # ibs.get_image_uris_original(clist['gid'])
    ibs.set_image_uris_original(clist['gid'], clist['url'], overwrite=True)

    # ut.zipflat(clist['cat'], clist['key'])
    if False:
        # Can run detection instead
        clist['tags'] = ut.zipflat(clist['cat'])
        aid_list = ibs.use_images_as_annotations(clist['gid'],
                                                 adjust_percent=0.01,
                                                 tags_list=clist['tags'])
        aid_list

    import wbia.plottool as pt
    from wbia import core_annots

    pt.qt4ensure()
    # annots = ibs.annots()
    # aids = [1, 2]
    # ibs.depc_annot.get('hog', aids , 'hog')
    # ibs.depc_annot.get('chip', aids, 'img')
    for aid in ut.InteractiveIter(ibs.get_valid_aids()):
        hogs = ibs.depc_annot.d.get_hog_hog([aid])
        chips = ibs.depc_annot.d.get_chips_img([aid])
        chip = chips[0]
        hogimg = core_annots.make_hog_block_image(hogs[0])
        pt.clf()
        pt.imshow(hogimg, pnum=(1, 2, 1))
        pt.imshow(chip, pnum=(1, 2, 2))
        fig = pt.gcf()
        fig.show()
        fig.canvas.draw()

    # logger.info(len(groupxs))

    # if False:
    # groupxs = ut.find_duplicate_items(ut.lmap(basename, suffix_list)).values()
    # logger.info(ut.repr3(ut.apply_grouping(all_urls, groupxs)))
    #    # FIX
    #    for fpath, fname in zip(fpath_list, fname_list):
    #        if ut.checkpath(fpath):
    #            ut.move(fpath, join(dirname(fpath), fname))
    #            logger.info('fpath = %r' % (fpath,))

    # import wbia
    # from wbia.dbio import ingest_dataset
    # dbdir = wbia.sysres.lookup_dbdir('WS_ALL')
    # self = ingest_dataset.Ingestable2(dbdir)

    if False:
        # Show overlap matrix
        import wbia.plottool as pt
        import pandas as pd
        import numpy as np

        dict_ = overlaps
        s = pd.Series(dict_, index=pd.MultiIndex.from_tuples(overlaps))
        df = s.unstack()
        lhs, rhs = df.align(df.T)
        df = lhs.add(rhs, fill_value=0).fillna(0)

        label_texts = df.columns.values

        def label_ticks(label_texts):
            import wbia.plottool as pt

            truncated_labels = [repr(lbl[0:100]) for lbl in label_texts]
            ax = pt.gca()
            ax.set_xticks(list(range(len(label_texts))))
            ax.set_xticklabels(truncated_labels)
            [lbl.set_rotation(-55) for lbl in ax.get_xticklabels()]
            [
                lbl.set_horizontalalignment('left')
                for lbl in ax.get_xticklabels()
            ]

            # xgrid, ygrid = np.meshgrid(range(len(label_texts)), range(len(label_texts)))
            # pt.plot_surface3d(xgrid, ygrid, disjoint_mat)
            ax.set_yticks(list(range(len(label_texts))))
            ax.set_yticklabels(truncated_labels)
            [
                lbl.set_horizontalalignment('right')
                for lbl in ax.get_yticklabels()
            ]
            [
                lbl.set_verticalalignment('center')
                for lbl in ax.get_yticklabels()
            ]
            # [lbl.set_rotation(20) for lbl in ax.get_yticklabels()]

        # df = df.sort(axis=0)
        # df = df.sort(axis=1)

        sortx = np.argsort(df.sum(axis=1).values)[::-1]
        df = df.take(sortx, axis=0)
        df = df.take(sortx, axis=1)

        fig = pt.figure(fnum=1)
        fig.clf()
        mat = df.values.astype(np.int32)
        mat[np.diag_indices(len(mat))] = 0
        vmax = mat[(1 - np.eye(len(mat))).astype(bool)].max()  # np.bool is removed in modern numpy
        import matplotlib.colors

        norm = matplotlib.colors.Normalize(vmin=0, vmax=vmax, clip=True)
        pt.plt.imshow(mat, cmap='hot', norm=norm, interpolation='none')
        pt.plt.colorbar()
        pt.plt.grid(False)
        label_ticks(label_texts)
        fig.tight_layout()

    # overlap_df = pd.DataFrame.from_dict(overlap_img_list)

    class TmpImage(ut.NiceRepr):
        pass

    from skimage.feature import hog
    from skimage import data, color, exposure
    import wbia.plottool as pt

    image2 = color.rgb2gray(data.astronaut())  # NOQA

    fpath = './GOPR1120.JPG'

    import vtool as vt

    for fpath in [fpath]:
        """
        http://scikit-image.org/docs/dev/auto_examples/plot_hog.html
        """

        image = vt.imread(fpath, grayscale=True)
        image = pt.color_funcs.to_base01(image)

        fig = pt.figure(fnum=2)
        fd, hog_image = hog(
            image,
            orientations=8,
            pixels_per_cell=(16, 16),
            cells_per_block=(1, 1),
            visualize=True,  # spelled 'visualise' in very old scikit-image
        )

        fig, (ax1, ax2) = pt.plt.subplots(1,
                                          2,
                                          figsize=(8, 4),
                                          sharex=True,
                                          sharey=True)

        ax1.axis('off')
        ax1.imshow(image, cmap=pt.plt.cm.gray)
        ax1.set_title('Input image')
        ax1.set_adjustable('box-forced')

        # Rescale histogram for better display
        hog_image_rescaled = exposure.rescale_intensity(hog_image,
                                                        in_range=(0, 0.02))

        ax2.axis('off')
        ax2.imshow(hog_image_rescaled, cmap=pt.plt.cm.gray)
        ax2.set_title('Histogram of Oriented Gradients')
        ax2.set_adjustable('box-forced')  # use 'box' on matplotlib >= 3
        pt.plt.show()
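
# A minimal, modern scikit-image HOG visualization sketch. It uses the
# bundled astronaut image, so it runs without any local files; note the
# keyword is 'visualize' on current scikit-image releases.
import matplotlib.pyplot as plt
from skimage import data, color, exposure
from skimage.feature import hog

image = color.rgb2gray(data.astronaut())
fd, hog_image = hog(image, orientations=8, pixels_per_cell=(16, 16),
                    cells_per_block=(1, 1), visualize=True)

fig, (ax1, ax2) = plt.subplots(1, 2, figsize=(8, 4), sharex=True, sharey=True)
ax1.imshow(image, cmap=plt.cm.gray)
ax1.set_title('Input image')
# rescale the HOG response for better display contrast
hog_rescaled = exposure.rescale_intensity(hog_image, in_range=(0, 0.02))
ax2.imshow(hog_rescaled, cmap=plt.cm.gray)
ax2.set_title('Histogram of Oriented Gradients')
plt.show()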
Example 8
import utool as ut

def detect_sharks(ibs, gids):
    # import wbia
    # ibs = wbia.opendb('WS_ALL')
    config = {
        'algo': 'yolo',
        'sensitivity': 0.2,
        'config_filepath': ut.truepath(
            '~/work/WS_ALL/localizer_backup/detect.yolo.2.cfg'),
        'weight_filepath': ut.truepath(
            '~/work/WS_ALL/localizer_backup/detect.yolo.2.39000.weights'),
        'class_filepath': ut.truepath(
            '~/work/WS_ALL/localizer_backup/detect.yolo.2.cfg.classes'),
    }
    depc = ibs.depc_image

    # imgsets = ibs.imagesets(text='Injured Sharks')
    # images = ibs.images(imgsets.gids[0])
    images = ibs.images(gids)
    images = images.compress([ext not in ['.gif'] for ext in images.exts])
    gid_list = images.gids

    # result is a tuple:
    # (score, bbox_list, theta_list, conf_list, class_list)
    results_list = depc.get_property('localizations',
                                     gid_list,
                                     None,
                                     config=config)

    results_list2 = []
    multi_gids = []
    failed_gids = []

    for gid, res in zip(gid_list, results_list):
        score, bbox_list, theta_list, conf_list, class_list = res
        if len(bbox_list) == 0:
            failed_gids.append(gid)
        elif len(bbox_list) == 1:
            results_list2.append((gid, bbox_list, theta_list))
        elif len(bbox_list) > 1:
            # keep only the highest-confidence detection
            multi_gids.append(gid)
            idx = conf_list.argmax()
            res2 = (gid, bbox_list[idx:idx + 1], theta_list[idx:idx + 1])
            results_list2.append(res2)

    # tag the problem images once the loop has populated these lists
    # ibs.set_image_imagesettext(failed_gids, ['Fixme'] * len(failed_gids))
    ibs.set_image_imagesettext(multi_gids, ['Fixme2'] * len(multi_gids))

    # histogram of detections-per-image
    ut.dict_hist([t[1].shape[0] for t in results_list])

    localized_imgs = ibs.images(ut.take_column(results_list2, 0))
    assert all([len(a) == 1 for a in localized_imgs.aids])
    old_annots = ibs.annots(ut.flatten(localized_imgs.aids))
    # old_tags = old_annots.case_tags

    # Override old bboxes
    import numpy as np

    bboxes = np.array(ut.take_column(results_list2, 1))[:, 0, :]
    ibs.set_annot_bboxes(old_annots.aids, bboxes)

    if False:
        import wbia.plottool as pt

        pt.qt4ensure()

        inter = pt.MultiImageInteraction(
            ibs.get_image_paths(ut.take_column(results_list2, 0)),
            bboxes_list=ut.take_column(results_list2, 1),
        )
        inter.dump_to_disk('shark_loc', num=50, prefix='shark_loc')
        inter.start()

        inter = pt.MultiImageInteraction(ibs.get_image_paths(failed_gids))
        inter.start()

        inter = pt.MultiImageInteraction(ibs.get_image_paths(multi_gids))
        inter.start()
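
# The argmax triage rule above in isolation, on synthetic detection output:
import numpy as np

bbox_list = np.array([[10, 10, 50, 50], [12, 14, 48, 52]])
conf_list = np.array([0.35, 0.80])

# keep only the highest-confidence detection, preserving the 2-D shape
idx = conf_list.argmax()
best_bbox = bbox_list[idx:idx + 1]
assert best_bbox.shape == (1, 4)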