Example #1
def add_ibeis_support(nnindexer, qreq_, new_daid_list,
                      verbose=ut.NOT_QUIET):
    r"""
    Adds support for new annotations to an existing indexer without
    rebuilding it from scratch.

    # TODO: ensure that the memcache changes appropriately
    """
    # Assumed module-level context: ``import numpy as np`` and
    # ``import utool as ut``; ``get_support_data`` is a helper defined in
    # the same module.
    from ibeis.algo.hots.neighbor_index import clear_memcache
    clear_memcache()
    if verbose:
        print('[nnindex] request add %d annots to single-indexer' % (
            len(new_daid_list)))
    indexed_aids = nnindexer.get_indexed_aids()
    # Ignore annotations that are already indexed
    duplicate_aids = set(new_daid_list).intersection(indexed_aids)
    if len(duplicate_aids) > 0:
        if verbose:
            print(('[nnindex] request has %d annots that are already '
                   'indexed. ignore those') % (len(duplicate_aids),))
        new_daid_list_ = np.array(sorted(set(new_daid_list) -
                                         duplicate_aids))
    else:
        new_daid_list_ = new_daid_list
    if len(new_daid_list_) == 0:
        if verbose:
            print('[nnindex] Nothing to do')
    else:
        tup = get_support_data(qreq_, new_daid_list_)
        # new_fxs_list is computed here but not passed to add_support in
        # this version of the function
        new_vecs_list, new_fgws_list, new_fxs_list = tup
        nnindexer.add_support(new_daid_list_, new_vecs_list, new_fgws_list,
                              verbose=verbose)
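
A minimal usage sketch (hypothetical session; assumes a local ibeis test database such as PZ_MTEST, and uses request_augmented_ibeis_nnindexer, which appears in the experiment example below):

import ibeis
from ibeis.algo.hots import neighbor_index

ibs = ibeis.opendb(defaultdb='PZ_MTEST')
daids = ibs.get_valid_aids()
qreq_ = ibs.new_query_request(daids, daids)
# Build an indexer over an initial subset, then grow it in place
nnindexer = neighbor_index.request_augmented_ibeis_nnindexer(qreq_, daids[:10])
add_ibeis_support(nnindexer, qreq_, daids[10:20])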
Example #2
def remove_ibeis_support(nnindexer, qreq_, remove_daid_list,
                         verbose=ut.NOT_QUIET):
    r"""
    Removes annotations from an existing indexer.

    # TODO: ensure that the memcache changes appropriately
    """
    # Assumed module-level context: ``import utool as ut``
    if verbose:
        print('[nnindex] request remove %d annots from single-indexer' %
              (len(remove_daid_list)))
    from ibeis.algo.hots.neighbor_index import clear_memcache
    clear_memcache()
    nnindexer.remove_support(remove_daid_list, verbose=verbose)
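
Continuing the hypothetical session from Example #1, removal is the mirror call:

# Drop the most recently added annotations from the index again
remove_ibeis_support(nnindexer, qreq_, daids[10:20])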
Example #3
def add_ibeis_support(nnindexer, qreq_, new_daid_list,
                      verbose=ut.NOT_QUIET):
    r"""
    # TODO: ensure that the memcache changes appropriately
    """
    # Variant of Example #1 in which get_support_data returns only
    # (new_vecs_list, new_fgws_list). Assumed module-level context:
    # ``import numpy as np`` and ``import utool as ut``.
    from ibeis.algo.hots.neighbor_index import clear_memcache
    clear_memcache()
    if verbose:
        print('[nnindex] request add %d annots to single-indexer' % (
            len(new_daid_list)))
    duplicate_aids = set(new_daid_list).intersection(
        nnindexer.get_indexed_aids())
    if len(duplicate_aids) > 0:
        if verbose:
            print('[nnindex] request has %d annots that are already indexed. '
                  'ignore those' % (len(duplicate_aids),))
        new_daid_list_ = np.array(sorted(set(new_daid_list) - duplicate_aids))
    else:
        new_daid_list_ = new_daid_list
    if len(new_daid_list_) == 0:
        if verbose:
            print('[nnindex] Nothing to do')
    else:
        new_vecs_list, new_fgws_list = get_support_data(qreq_, new_daid_list_)
        nnindexer.add_support(new_daid_list_, new_vecs_list, new_fgws_list,
                              verbose=verbose)
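
The duplicate-filtering idiom shared by both add variants is easy to verify in isolation; a self-contained sketch with made-up annotation ids:

import numpy as np

indexed_aids = [1, 2, 3, 5]    # already in the indexer
new_daid_list = [3, 4, 5, 6]   # requested additions
duplicate_aids = set(new_daid_list).intersection(indexed_aids)
new_daid_list_ = np.array(sorted(set(new_daid_list) - duplicate_aids))
print(new_daid_list_)          # -> [4 6]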
Example #4
def augment_nnindexer_experiment():
    """

    python -c "import utool; print(utool.auto_docstr('ibeis.algo.hots._neighbor_experiment', 'augment_nnindexer_experiment'))"

    References:
        http://answers.opencv.org/question/44592/flann-index-in-python-training-fails-with-segfault/

    CommandLine:
        utprof.py -m ibeis.algo.hots._neighbor_experiment --test-augment_nnindexer_experiment
        python -m ibeis.algo.hots._neighbor_experiment --test-augment_nnindexer_experiment

        python -m ibeis.algo.hots._neighbor_experiment --test-augment_nnindexer_experiment --db PZ_MTEST --diskshow --adjust=.1 --save "augment_experiment_{db}.png" --dpath='.' --dpi=180 --figsize=9,6
        python -m ibeis.algo.hots._neighbor_experiment --test-augment_nnindexer_experiment --db PZ_Master0 --diskshow --adjust=.1 --save "augment_experiment_{db}.png" --dpath='.' --dpi=180 --figsize=9,6 --nosave-flann --show

        python -m ibeis.algo.hots._neighbor_experiment --test-augment_nnindexer_experiment --db PZ_Master0 --diskshow --adjust=.1 --save "augment_experiment_{db}.png" --dpath='.' --dpi=180 --figsize=9,6 --nosave-flann --no-api-cache --nocache-uuids

        python -m ibeis.algo.hots._neighbor_experiment --test-augment_nnindexer_experiment --db PZ_MTEST --show
        python -m ibeis.algo.hots._neighbor_experiment --test-augment_nnindexer_experiment --db PZ_Master0 --show

        # RUNS THE SEGFAULTING CASE
        python -m ibeis.algo.hots._neighbor_experiment --test-augment_nnindexer_experiment --db PZ_Master0 --show
        # Debug it
        gdb python
        run -m ibeis.algo.hots._neighbor_experiment --test-augment_nnindexer_experiment --db PZ_Master0 --show
        gdb python
        run -m ibeis.algo.hots._neighbor_experiment --test-augment_nnindexer_experiment --db PZ_Master0 --diskshow --adjust=.1 --save "augment_experiment_{db}.png" --dpath='.' --dpi=180 --figsize=9,6


    Example:
        >>> # DISABLE_DOCTEST
        >>> from ibeis.algo.hots._neighbor_experiment import *  # NOQA
        >>> # execute function
        >>> augment_nnindexer_experiment()
        >>> # verify results
        >>> ut.show_if_requested()

    """
    import ibeis
    # these are module-level imports in the original file; included here
    # so the snippet is self-contained
    import utool as ut
    from ibeis.algo.hots import neighbor_index
    # build test data
    #ibs = ibeis.opendb('PZ_MTEST')
    ibs = ibeis.opendb(defaultdb='PZ_Master0')
    if ibs.get_dbname() == 'PZ_MTEST':
        initial = 1
        addition_stride = 4
        max_ceiling = 100
    elif ibs.get_dbname() == 'PZ_Master0':
        initial = 128
        #addition_stride = 64
        #addition_stride = 128
        addition_stride = 256
        max_ceiling = 10000
        #max_ceiling = 4000
        #max_ceiling = 2000
        #max_ceiling = 600
    else:
        assert False, 'unknown dbname: %r' % (ibs.get_dbname(),)
    all_daids = ibs.get_valid_aids(species='zebra_plains')
    qreq_ = ibs.new_query_request(all_daids, all_daids)
    max_num = min(max_ceiling, len(all_daids))

    # Clear Caches
    ibs.delete_flann_cachedir()
    neighbor_index.clear_memcache()
    neighbor_index.clear_uuid_cache(qreq_)

    # Setup
    all_randomize_daids_ = ut.deterministic_shuffle(all_daids[:])
    # ensure all features are computed
    #ibs.get_annot_vecs(all_randomize_daids_, ensure=True)
    #ibs.get_annot_fgweights(all_randomize_daids_, ensure=True)

    nnindexer_list = []
    addition_lbl = 'Addition'
    _addition_iter = list(range(initial + 1, max_num, addition_stride))
    addition_iter = iter(ut.ProgressIter(_addition_iter, lbl=addition_lbl,
                                         freq=1, autoadjust=False))
    time_list_addition = []
    addition_count_list = []
    tmp_cfgstr_list = []
    # initialized before the try block so the plotting code at the end
    # still works if the reindex loop is interrupted
    time_list_reindex = []
    reindex_count_list = []

    #for _ in range(80):
    #    next(addition_iter)
    try:
        memtrack = ut.MemoryTracker(disable=False)
        for count in addition_iter:
            aid_list_ = all_randomize_daids_[0:count]
            # Request an indexer which could be an augmented version of an existing indexer.
            with ut.Timer(verbose=False) as t:
                memtrack.report('BEFORE AUGMENT')
                nnindexer_ = neighbor_index.request_augmented_ibeis_nnindexer(qreq_, aid_list_)
                memtrack.report('AFTER AUGMENT')
            nnindexer_list.append(nnindexer_)
            addition_count_list.append(count)
            time_list_addition.append(t.ellapsed)
            tmp_cfgstr_list.append(nnindexer_.cfgstr)
            print('===============\n\n')
        print(ut.list_str(time_list_addition))
        print(ut.list_str(list(map(id, nnindexer_list))))
        print(ut.list_str(tmp_cfgstr_list))
        print(ut.list_str([nnindexer.cfgstr for nnindexer in nnindexer_list]))

        IS_SMALL = False

        if IS_SMALL:
            nnindexer_list = []
        reindex_label = 'Reindex'
        # go backwards for reindex
        _reindex_iter = list(range(initial + 1, max_num, addition_stride))[::-1]
        reindex_iter = ut.ProgressIter(_reindex_iter, lbl=reindex_label)

        for count in reindex_iter:
            print('\n+===PREDONE====================\n')
            # check only a single size for memory leaks
            #count = max_num // 16 + ((x % 6) * 1)
            #x += 1

            aid_list_ = all_randomize_daids_[0:count]
            # Call the same code, but force rebuilds
            memtrack.report('BEFORE REINDEX')
            with ut.Timer(verbose=False) as t:
                nnindexer_ = neighbor_index.request_augmented_ibeis_nnindexer(
                    qreq_, aid_list_, force_rebuild=True, memtrack=memtrack)
            memtrack.report('AFTER REINDEX')
            ibs.print_cachestats_str()
            print('[nnindex.MEMCACHE] size(NEIGHBOR_CACHE) = %s' % (
                ut.get_object_size_str(neighbor_index.NEIGHBOR_CACHE.items()),))
            print('[nnindex.MEMCACHE] len(NEIGHBOR_CACHE) = %s' % (
                len(neighbor_index.NEIGHBOR_CACHE.items()),))
            print('[nnindex.MEMCACHE] size(UUID_MAP_CACHE) = %s' % (
                ut.get_object_size_str(neighbor_index.UUID_MAP_CACHE),))
            print('totalsize(nnindexer) = ' + ut.get_object_size_str(nnindexer_))
            memtrack.report_type(neighbor_index.NeighborIndex)
            ut.print_object_size_tree(nnindexer_, lbl='nnindexer_')
            if IS_SMALL:
                nnindexer_list.append(nnindexer_)
            reindex_count_list.append(count)
            time_list_reindex.append(t.ellapsed)
            #import cv2
            #import matplotlib as mpl
            #print(mem_top.mem_top(limit=30, width=120,
            #                      #exclude_refs=[cv2.__dict__, mpl.__dict__]
            #     ))
            print('L___________________\n\n\n')
        print(ut.list_str(time_list_reindex))
        if IS_SMALL:
            print(ut.list_str(list(map(id, nnindexer_list))))
            print(ut.list_str([nnindexer.cfgstr for nnindexer in nnindexer_list]))
    except KeyboardInterrupt:
        print('\n[train] Caught CTRL+C')
        resolution = ''
        from six.moves import input
        while not resolution.isdigit():
            print('\n[train] What do you want to do?')
            print('[train]     0 - Continue')
            print('[train]     1 - Embed')
            print('[train]  ELSE - Stop network training')
            resolution = input('[train] Resolution: ')
        resolution = int(resolution)
        # We have a resolution
        if resolution == 0:
            print('resuming training...')
        elif resolution == 1:
            ut.embed()

    import plottool as pt

    next_fnum = iter(range(0, 1)).__next__  # use __next__ for Python 3
    pt.figure(fnum=next_fnum())
    if len(addition_count_list) > 0:
        pt.plot2(addition_count_list, time_list_addition, marker='-o', equal_aspect=False,
                 x_label='num_annotations', label=addition_lbl + ' Time')

    if len(reindex_count_list) > 0:
        pt.plot2(reindex_count_list, time_list_reindex, marker='-o', equal_aspect=False,
                 x_label='num_annotations', label=reindex_label + ' Time')

    pt.set_figtitle('Augmented indexer experiment')

    pt.legend()
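
The same timing pattern (incremental addition vs. full rebuild, plotted against dataset size) can be reproduced without ibeis. A dependency-light sketch; FakeIndexer and all values here are illustrative stand-ins, not part of ibeis:

import time
import numpy as np
import matplotlib.pyplot as plt


class FakeIndexer(object):
    """Stand-in for a FLANN-backed neighbor indexer."""
    def __init__(self, vecs):
        # pretend this builds a kd-forest over the descriptors
        self.vecs = vecs.copy()

    def add_support(self, new_vecs):
        # pretend this augments the existing trees in place
        self.vecs = np.vstack([self.vecs, new_vecs])


rng = np.random.RandomState(0)
all_vecs = rng.rand(5000, 128).astype(np.float32)
counts = list(range(500, 5001, 500))

add_times, rebuild_times = [], []
indexer = FakeIndexer(all_vecs[:counts[0]])
prev = counts[0]
for count in counts:
    tic = time.time()
    indexer.add_support(all_vecs[prev:count])  # incremental addition
    add_times.append(time.time() - tic)
    prev = count

    tic = time.time()
    FakeIndexer(all_vecs[:count])              # full rebuild at same size
    rebuild_times.append(time.time() - tic)

plt.plot(counts, add_times, '-o', label='Addition Time')
plt.plot(counts, rebuild_times, '-o', label='Reindex Time')
plt.xlabel('num_annotations')
plt.ylabel('seconds')
plt.title('Augmented indexer experiment (sketch)')
plt.legend()
plt.show()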