Example #1
    def get_patches(inva, wx, ibs, verbose=True):
        """
        Loads the patches assigned to a particular word in this stack

        >>> inva.wx_to_aids = inva.compute_inverted_list()
        >>> verbose=True
        """
        config = inva.config
        aid_list = inva.wx_to_aids[wx]
        X_list = [inva.get_annot(aid) for aid in aid_list]
        fxs_groups = [X.fxs(wx) for X in X_list]
        all_kpts_list = ibs.depc.d.get_feat_kpts(aid_list, config=config)
        sub_kpts_list = vt.ziptake(all_kpts_list, fxs_groups, axis=0)
        total_patches = sum(ut.lmap(len, fxs_groups))

        chip_list = ibs.depc_annot.d.get_chips_img(aid_list, config=config)
        # convert to appropriate colorspace
        # if colorspace is not None:
        #    chip_list = vt.convert_image_list_colorspace(chip_list, colorspace)
        # ut.print_object_size(chip_list, 'chip_list')

        patch_size = 64
        shape = (total_patches, patch_size, patch_size, 3)
        _prog = ut.ProgPartial(enabled=verbose, lbl='warping patches', bs=True)
        _patchiter = ut.iflatten(
            [
                vt.get_warped_patches(chip, kpts, patch_size=patch_size)[0]
                # vt.get_warped_patches(chip, kpts, patch_size=patch_size, use_cpp=True)[0]
                for chip, kpts in _prog(
                    zip(chip_list, sub_kpts_list), length=len(aid_list)
                )
            ]
        )
        word_patches = vt.fromiter_nd(_patchiter, shape, dtype=np.uint8)
        return word_patches
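
The only nonstandard step above is vt.fromiter_nd, which materializes the warped patches from the iterator into a single preallocated array. A rough sketch of that stacking step in plain NumPy (the helper name stack_patches_from_iter is hypothetical, and it assumes each yielded patch is a (patch_size, patch_size, 3) uint8 array):

import numpy as np

def stack_patches_from_iter(patch_iter, total_patches, patch_size=64):
    # Preallocate the output stack and fill it row by row from the iterator;
    # this approximates what vt.fromiter_nd is used for above.
    out = np.empty((total_patches, patch_size, patch_size, 3), dtype=np.uint8)
    for i, patch in enumerate(patch_iter):
        out[i] = patch
    return out
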
Example #2
    def get_patches(invassign, wx):
        ax_list = invassign.wx2_axs[wx]
        fx_list = invassign.wx2_fxs[wx]
        config = invassign.fstack.config
        ibs = invassign.fstack.ibs

        unique_axs, groupxs = vt.group_indices(ax_list)
        fxs_groups = vt.apply_grouping(fx_list, groupxs)

        unique_aids = ut.take(invassign.fstack.ax2_aid, unique_axs)

        all_kpts_list = ibs.depc.d.get_feat_kpts(unique_aids, config=config)
        sub_kpts_list = vt.ziptake(all_kpts_list, fxs_groups, axis=0)

        chip_list = ibs.depc_annot.d.get_chips_img(unique_aids)
        # convert to appropriate colorspace
        # if colorspace is not None:
        #    chip_list = vt.convert_image_list_colorspace(chip_list, colorspace)
        # ut.print_object_size(chip_list, 'chip_list')
        patch_size = 64
        grouped_patches_list = [
            vt.get_warped_patches(chip, kpts, patch_size=patch_size)[0]
            for chip, kpts in ut.ProgIter(zip(chip_list, sub_kpts_list),
                                          nTotal=len(unique_aids),
                                          lbl='warping patches')
        ]
        # Make it correspond with original fx_list and ax_list
        word_patches = vt.invert_apply_grouping(grouped_patches_list, groupxs)
        return word_patches
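
This version groups the flat ax/fx assignments per unique annotation, warps patches once per annotation, and then scatters the results back to the original order. The exact semantics of vt.group_indices and vt.invert_apply_grouping are assumptions inferred from how they are called here; a plain-NumPy sketch of that group/ungroup pattern:

import numpy as np

def group_indices(id_arr):
    # Group positions by value: return the unique values and, for each one,
    # the positions in id_arr where it occurs.
    sortx = np.argsort(id_arr, kind='stable')
    sorted_ids = np.asarray(id_arr)[sortx]
    _, first_pos = np.unique(sorted_ids, return_index=True)
    groupxs = np.split(sortx, first_pos[1:])
    return sorted_ids[first_pos], groupxs

def invert_apply_grouping(grouped_items, groupxs):
    # Scatter grouped results back to their original (pre-grouping) order.
    total = sum(len(idxs) for idxs in groupxs)
    out = [None] * total
    for items, idxs in zip(grouped_items, groupxs):
        for item, idx in zip(items, idxs):
            out[idx] = item
    return out
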
Example #3
def get_name_shortlist_aids(
    daid_list,
    dnid_list,
    annot_score_list,
    name_score_list,
    nid2_nidx,
    nNameShortList,
    nAnnotPerName,
):
    r"""
    CommandLine:
        python -m wbia.algo.hots.scoring --test-get_name_shortlist_aids

    Example:
        >>> # ENABLE_DOCTEST
        >>> from wbia.algo.hots.scoring import *  # NOQA
        >>> daid_list        = np.array([11, 12, 13, 14, 15, 16, 17])
        >>> dnid_list        = np.array([21, 21, 21, 22, 22, 23, 24])
        >>> annot_score_list = np.array([ 6,  2,  3,  5,  6,  3,  2])
        >>> name_score_list  = np.array([ 8,  9,  5,  4])
        >>> nid2_nidx        = {21:0, 22:1, 23:2, 24:3}
        >>> nNameShortList, nAnnotPerName = 3, 2
        >>> args = (daid_list, dnid_list, annot_score_list, name_score_list,
        ...         nid2_nidx, nNameShortList, nAnnotPerName)
        >>> top_daids = get_name_shortlist_aids(*args)
        >>> result = str(top_daids)
        >>> print(result)
        [15, 14, 11, 13, 16]
    """
    unique_nids, groupxs = vt.group_indices(np.array(dnid_list))
    grouped_annot_scores = vt.apply_grouping(annot_score_list, groupxs)
    grouped_daids = vt.apply_grouping(np.array(daid_list), groupxs)
    # Ensure name score list is aligned with the unique_nids
    aligned_name_score_list = name_score_list.take(
        ut.dict_take(nid2_nidx, unique_nids))
    # Sort each group by the name score
    group_sortx = aligned_name_score_list.argsort()[::-1]
    _top_daid_groups = ut.take(grouped_daids, group_sortx)
    _top_annot_score_groups = ut.take(grouped_annot_scores, group_sortx)
    top_daid_groups = ut.listclip(_top_daid_groups, nNameShortList)
    top_annot_score_groups = ut.listclip(_top_annot_score_groups,
                                         nNameShortList)
    # Sort within each group by the annotation score
    top_daid_sortx_groups = [
        annot_score_group.argsort()[::-1]
        for annot_score_group in top_annot_score_groups
    ]
    top_sorted_daid_groups = vt.ziptake(top_daid_groups, top_daid_sortx_groups)
    top_clipped_daids = [
        ut.listclip(sorted_daid_group, nAnnotPerName)
        for sorted_daid_group in top_sorted_daid_groups
    ]
    top_daids = ut.flatten(top_clipped_daids)
    return top_daids
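
For reference, the same shortlist logic can be written without the ut/vt helpers. The sketch below (the name name_shortlist is hypothetical) groups annotations by name, keeps the nNameShortList best-scoring names, keeps the nAnnotPerName best annotations within each name, and flattens; on the doctest data above it produces [15, 14, 11, 13, 16].

import numpy as np

def name_shortlist(daids, dnids, annot_scores, name_scores, nid2_nidx,
                   n_names, n_annots_per_name):
    daids = np.asarray(daids)
    dnids = np.asarray(dnids)
    annot_scores = np.asarray(annot_scores)
    # Rank the unique names by their name score, best first
    unique_nids = np.unique(dnids)
    nid_order = sorted(unique_nids,
                       key=lambda nid: -name_scores[nid2_nidx[nid]])
    top_daids = []
    for nid in nid_order[:n_names]:
        member_idx = np.flatnonzero(dnids == nid)
        # Best annotations first within each name
        best = member_idx[np.argsort(-annot_scores[member_idx])]
        top_daids.extend(daids[best[:n_annots_per_name]].tolist())
    return top_daids
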
Example #4
def get_name_shortlist_aids(daid_list, dnid_list, annot_score_list,
                            name_score_list, nid2_nidx,
                            nNameShortList, nAnnotPerName):
    r"""
    CommandLine:
        python -m ibeis.algo.hots.scoring --test-get_name_shortlist_aids

    Example:
        >>> # ENABLE_DOCTEST
        >>> from ibeis.algo.hots.scoring import *  # NOQA
        >>> # build test data
        >>> daid_list        = np.array([11, 12, 13, 14, 15, 16, 17])
        >>> dnid_list        = np.array([21, 21, 21, 22, 22, 23, 24])
        >>> annot_score_list = np.array([ 6,  2,  3,  5,  6,  3,  2])
        >>> name_score_list  = np.array([ 8,  9,  5,  4])
        >>> nid2_nidx        = {21:0, 22:1, 23:2, 24:3}
        >>> nNameShortList, nAnnotPerName = 3, 2
        >>> # execute function
        >>> args = (daid_list, dnid_list, annot_score_list, name_score_list,
        ...         nid2_nidx, nNameShortList, nAnnotPerName)
        >>> top_daids = get_name_shortlist_aids(*args)
        >>> # verify results
        >>> result = str(top_daids)
        >>> print(result)
        [15, 14, 11, 13, 16]
    """
    unique_nids, groupxs    = vt.group_indices(np.array(dnid_list))
    grouped_annot_scores    = vt.apply_grouping(annot_score_list, groupxs)
    grouped_daids           = vt.apply_grouping(np.array(daid_list), groupxs)
    # Ensure name score list is aligned with the unique_nids
    aligned_name_score_list = name_score_list.take(ut.dict_take(nid2_nidx, unique_nids))
    # Sort each group by the name score
    group_sortx             = aligned_name_score_list.argsort()[::-1]
    _top_daid_groups        = ut.take(grouped_daids, group_sortx)
    _top_annot_score_groups = ut.take(grouped_annot_scores, group_sortx)
    top_daid_groups         = ut.listclip(_top_daid_groups, nNameShortList)
    top_annot_score_groups  = ut.listclip(_top_annot_score_groups, nNameShortList)
    # Sort within each group by the annotation score
    top_daid_sortx_groups   = [annot_score_group.argsort()[::-1]
                               for annot_score_group in top_annot_score_groups]
    top_sorted_daid_groups  = vt.ziptake(top_daid_groups, top_daid_sortx_groups)
    top_clipped_daids = [ut.listclip(sorted_daid_group, nAnnotPerName)
                         for sorted_daid_group in top_sorted_daid_groups]
    top_daids = ut.flatten(top_clipped_daids)
    return top_daids
Example #5
def hackshow_names(ibs, aid_list, fnum=None):
    r"""
    Args:
        ibs (IBEISController):  wbia controller object
        aid_list (list):

    CommandLine:
        python -m wbia.other.dbinfo --exec-hackshow_names --show
        python -m wbia.other.dbinfo --exec-hackshow_names --show --db PZ_Master1

    Example:
        >>> # DISABLE_DOCTEST
        >>> from wbia.other.dbinfo import *  # NOQA
        >>> import wbia
        >>> ibs = wbia.opendb(defaultdb='PZ_MTEST')
        >>> aid_list = ibs.get_valid_aids()
        >>> result = hackshow_names(ibs, aid_list)
        >>> print(result)
        >>> ut.show_if_requested()
    """
    import wbia.plottool as pt
    import vtool as vt

    grouped_aids, nid_list = ibs.group_annots_by_name(aid_list)
    grouped_aids = [aids for aids in grouped_aids if len(aids) > 1]
    unixtimes_list = ibs.unflat_map(ibs.get_annot_image_unixtimes_asfloat, grouped_aids)
    yaws_list = ibs.unflat_map(ibs.get_annot_yaws, grouped_aids)
    # markers_list = [[(1, 2, yaw * 360 / (np.pi * 2)) for yaw in yaws] for yaws in yaws_list]

    unixtime_list = ut.flatten(unixtimes_list)
    timemax = np.nanmax(unixtime_list)
    timemin = np.nanmin(unixtime_list)
    timerange = timemax - timemin
    unixtimes_list = [
        ((unixtimes[:] - timemin) / timerange) for unixtimes in unixtimes_list
    ]
    for unixtimes in unixtimes_list:
        num_nan = sum(np.isnan(unixtimes))
        unixtimes[np.isnan(unixtimes)] = np.linspace(-1, -0.5, num_nan)
    # ydata_list = [np.arange(len(aids)) for aids in grouped_aids]
    sortx_list = vt.argsort_groups(unixtimes_list, reverse=False)
    # markers_list = ut.list_ziptake(markers_list, sortx_list)
    yaws_list = ut.list_ziptake(yaws_list, sortx_list)
    ydatas_list = vt.ziptake(unixtimes_list, sortx_list)
    # ydatas_list = sortx_list
    # ydatas_list = vt.argsort_groups(unixtimes_list, reverse=False)

    # Sort by num members
    # ydatas_list = ut.take(ydatas_list, np.argsort(list(map(len, ydatas_list))))
    xdatas_list = [
        np.zeros(len(ydatas)) + count for count, ydatas in enumerate(ydatas_list)
    ]
    # markers = ut.flatten(markers_list)
    # yaws = np.array(ut.flatten(yaws_list))
    y_data = np.array(ut.flatten(ydatas_list))
    x_data = np.array(ut.flatten(xdatas_list))
    fnum = pt.ensure_fnum(fnum)
    pt.figure(fnum=fnum)
    ax = pt.gca()

    # unique_yaws, groupxs = vt.group_indices(yaws)

    ax.scatter(x_data, y_data, color=[1, 0, 0], s=1, marker='.')
    # pt.draw_stems(x_data, y_data, marker=markers, setlims=True, linestyle='')
    pt.dark_background()
    ax = pt.gca()
    ax.set_xlim(min(x_data) - 0.1, max(x_data) + 0.1)
    ax.set_ylim(min(y_data) - 0.1, max(y_data) + 0.1)
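
The time handling above rescales every timestamp into [0, 1] over the global time range and parks unknown (NaN) times in a [-1, -0.5] band so they still show up on the plot. A small sketch of just that normalization step, assuming each entry is a float ndarray (the helper name normalize_times_with_nans is hypothetical):

import numpy as np

def normalize_times_with_nans(unixtimes_list):
    # Rescale all timestamps to [0, 1] over the global time range and place
    # NaN (unknown) times in the [-1, -0.5] band so they still plot.
    all_times = np.concatenate([np.asarray(t, dtype=float) for t in unixtimes_list])
    tmin, tmax = np.nanmin(all_times), np.nanmax(all_times)
    span = tmax - tmin
    out = []
    for times in unixtimes_list:
        scaled = (np.asarray(times, dtype=float) - tmin) / span
        nan_mask = np.isnan(scaled)
        scaled[nan_mask] = np.linspace(-1, -0.5, nan_mask.sum())
        out.append(scaled)
    return out
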
Example #6
def get_support_data(qreq_, daid_list):
    """

    CommandLine:
        python -m wbia.algo.hots.neighbor_index get_support_data --show

    Example:
        >>> # xdoctest: +REQUIRES(module:wbia_cnn)
        >>> from wbia.algo.hots.neighbor_index import *  # NOQA
        >>> import wbia
        >>> qreq_ = wbia.testdata_qreq_(defaultdb='PZ_MTEST', p=':fgw_thresh=.9,maxscale_thresh=10', a=':size=2')
        >>> daid_list = qreq_.daids
        >>> tup  = get_support_data(qreq_, daid_list)
        >>> vecs_list, fgws_list, fxs_list = tup
        >>> assert all([np.all(fgws > .9) for fgws in fgws_list])
        >>> result = ('depth_profile = %r' % (ut.depth_profile(tup),))
        >>> print(result)

        depth_profile = [[(128, 128), (174, 128)], [128, 174], [128, 174]]

        I can't figure out why this test isn't deterministic all the time and
        I can't get it to reproduce the non-determinism.

        This could be due to Theano.

        depth_profile = [[(39, 128), (22, 128)], [39, 22], [39, 22]]
        depth_profile = [[(35, 128), (24, 128)], [35, 24], [35, 24]]
        depth_profile = [[(34, 128), (31, 128)], [34, 31], [34, 31]]
        depth_profile = [[(83, 128), (129, 128)], [83, 129], [83, 129]]
        depth_profile = [[(13, 128), (104, 128)], [13, 104], [13, 104]]
    """
    config2_ = qreq_.get_internal_data_config2()
    vecs_list = qreq_.ibs.get_annot_vecs(daid_list, config2_=config2_)
    # Create corresponding feature indices
    fxs_list = [np.arange(len(vecs)) for vecs in vecs_list]
    # <HACK:featweight>
    # hack to get feature weights; returns None if feature weights are turned
    # off in config settings

    if config2_.minscale_thresh is not None or config2_.maxscale_thresh is not None:
        min_ = -np.inf if config2_.minscale_thresh is None else config2_.minscale_thresh
        max_ = np.inf if config2_.maxscale_thresh is None else config2_.maxscale_thresh
        kpts_list = qreq_.ibs.get_annot_kpts(daid_list, config2_=config2_)
        # kpts_list = vt.ziptake(kpts_list, fxs_list, axis=0)  # not needed for first filter
        scales_list = [vt.get_scales(kpts) for kpts in kpts_list]
        # Remove data under the threshold
        flags_list = [
            np.logical_and(scales >= min_, scales <= max_)
            for scales in scales_list
        ]
        vecs_list = vt.zipcompress(vecs_list, flags_list, axis=0)
        fxs_list = vt.zipcompress(fxs_list, flags_list, axis=0)

    if qreq_.qparams.fg_on:
        # I've found that the call to get_annot_fgweights is different on
        # different machines.  Something must be configured differently.
        fgws_list = qreq_.ibs.get_annot_fgweights(daid_list,
                                                  config2_=config2_,
                                                  ensure=True)
        fgws_list = vt.ziptake(fgws_list, fxs_list, axis=0)
        # assert list(map(len, fgws_list)) == list(map(len, vecs_list)), 'bad corresponding vecs'
        if config2_.fgw_thresh is not None and config2_.fgw_thresh > 0:
            flags_list = [fgws > config2_.fgw_thresh for fgws in fgws_list]
            # Remove data under the threshold
            fgws_list = vt.zipcompress(fgws_list, flags_list, axis=0)
            vecs_list = vt.zipcompress(vecs_list, flags_list, axis=0)
            fxs_list = vt.zipcompress(fxs_list, flags_list, axis=0)
    else:
        fgws_list = None
    # </HACK:featweight>
    return vecs_list, fgws_list, fxs_list
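
Both threshold branches above follow the same pattern: build one boolean mask per annotation and compress every parallel list with it (which is how vt.zipcompress appears to be used here; that reading is an assumption). A minimal sketch of the featweight branch with plain NumPy arrays (filter_by_weight is a hypothetical name):

import numpy as np

def filter_by_weight(vecs_list, fgws_list, fxs_list, fgw_thresh):
    # One boolean mask per annotation, then row-wise compression of every
    # parallel list with that mask.
    flags_list = [np.asarray(fgws) > fgw_thresh for fgws in fgws_list]
    vecs_list = [np.asarray(v)[f] for v, f in zip(vecs_list, flags_list)]
    fgws_list = [np.asarray(w)[f] for w, f in zip(fgws_list, flags_list)]
    fxs_list = [np.asarray(x)[f] for x, f in zip(fxs_list, flags_list)]
    return vecs_list, fgws_list, fxs_list
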
Example #7
def hackshow_names(ibs, aid_list, fnum=None):
    r"""
    Args:
        ibs (IBEISController):  ibeis controller object
        aid_list (list):

    CommandLine:
        python -m ibeis.other.dbinfo --exec-hackshow_names --show
        python -m ibeis.other.dbinfo --exec-hackshow_names --show --db PZ_Master1

    Example:
        >>> # DISABLE_DOCTEST
        >>> from ibeis.other.dbinfo import *  # NOQA
        >>> import ibeis
        >>> ibs = ibeis.opendb(defaultdb='PZ_MTEST')
        >>> aid_list = ibs.get_valid_aids()
        >>> result = hackshow_names(ibs, aid_list)
        >>> print(result)
        >>> ut.show_if_requested()
    """
    import plottool as pt
    import vtool as vt
    grouped_aids, nid_list = ibs.group_annots_by_name(aid_list)
    grouped_aids = [aids for aids in grouped_aids if len(aids) > 1]
    unixtimes_list = ibs.unflat_map(ibs.get_annot_image_unixtimes_asfloat, grouped_aids)
    yaws_list = ibs.unflat_map(ibs.get_annot_yaws, grouped_aids)
    #markers_list = [[(1, 2, yaw * 360 / (np.pi * 2)) for yaw in yaws] for yaws in yaws_list]

    unixtime_list = ut.flatten(unixtimes_list)
    timemax = np.nanmax(unixtime_list)
    timemin = np.nanmin(unixtime_list)
    timerange = timemax - timemin
    unixtimes_list = [((unixtimes[:] - timemin) / timerange) for unixtimes in unixtimes_list]
    for unixtimes in unixtimes_list:
        num_nan = sum(np.isnan(unixtimes))
        unixtimes[np.isnan(unixtimes)] = np.linspace(-1, -.5, num_nan)
    #ydata_list = [np.arange(len(aids)) for aids in grouped_aids]
    sortx_list = vt.argsort_groups(unixtimes_list, reverse=False)
    #markers_list = ut.list_ziptake(markers_list, sortx_list)
    yaws_list = ut.list_ziptake(yaws_list, sortx_list)
    ydatas_list = vt.ziptake(unixtimes_list, sortx_list)
    #ydatas_list = sortx_list
    #ydatas_list = vt.argsort_groups(unixtimes_list, reverse=False)

    # Sort by num members
    #ydatas_list = ut.take(ydatas_list, np.argsort(list(map(len, ydatas_list))))
    xdatas_list = [np.zeros(len(ydatas)) + count for count, ydatas in enumerate(ydatas_list)]
    #markers = ut.flatten(markers_list)
    #yaws = np.array(ut.flatten(yaws_list))
    y_data = np.array(ut.flatten(ydatas_list))
    x_data = np.array(ut.flatten(xdatas_list))
    fnum = pt.ensure_fnum(fnum)
    pt.figure(fnum=fnum)
    ax = pt.gca()

    #unique_yaws, groupxs = vt.group_indices(yaws)

    ax.scatter(x_data, y_data, color=[1, 0, 0], s=1, marker='.')
    #pt.draw_stems(x_data, y_data, marker=markers, setlims=True, linestyle='')
    pt.dark_background()
    ax = pt.gca()
    ax.set_xlim(min(x_data) - .1, max(x_data) + .1)
    ax.set_ylim(min(y_data) - .1, max(y_data) + .1)