Example 1
def load_query_results(hs, qcx_list, force_load=False):
    query_cfg = hs.prefs.query_cfg
    # Build query big cache uid
    query_uid = query_cfg.get_uid()
    hs_uid    = hs.get_db_name()
    qcxs_uid  = helpers.hashstr(tuple(qcx_list))
    qres_uid  = hs_uid + query_uid + qcxs_uid
    cache_dir = join(hs.dirs.cache_dir, 'query_results_bigcache')
    print('\n===============')
    print('\n[rr2] Load Query Results')
    print('[rr2] load_query_results(): %r' % qres_uid)
    io_kwargs = dict(dpath=cache_dir, fname='query_results', uid=qres_uid, ext='.cPkl')
    # Return cache if available
    if not hs.args.nocache_query and (not force_load):
        qcx2_res = io.smart_load(**io_kwargs)
        if qcx2_res is not None:
            print('[rr2] load_query_results(): cache hit')
            return qcx2_res
        print('[rr2] load_query_results(): cache miss')
    else:
        print('[rr2] load_query_results(): cache off')
    # Individually load / compute queries
    qcx2_res = [hs.query(qcx) for qcx in qcx_list]
    # Save to the cache
    print('[rr2] Saving query_results to bigcache: %r' % qres_uid)
    helpers.ensuredir(cache_dir)
    io.smart_save(qcx2_res, **io_kwargs)
    return qcx2_res
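
Example 1 builds a "big cache" key from the database name, the query-config uid, and a hash of the query list, then falls back to per-query computation on a miss. Below is a minimal, self-contained sketch of the same idiom using only the standard library; hashlib and pickle stand in for helpers.hashstr and io.smart_load/smart_save, and hash_uid/cached_compute are hypothetical names, not HotSpotter API.

import hashlib
import os
import pickle

def hash_uid(items):
    # Stable short uid from a sequence, analogous to helpers.hashstr(tuple(qcx_list))
    return hashlib.sha1(repr(tuple(items)).encode('utf-8')).hexdigest()[:16]

def cached_compute(cache_dir, uid, compute_fn, force=False):
    # The uid becomes part of the cache filename, mirroring qres_uid above
    fpath = os.path.join(cache_dir, 'results_' + uid + '.pkl')
    if not force and os.path.exists(fpath):
        with open(fpath, 'rb') as file_:
            return pickle.load(file_)  # cache hit
    result = compute_fn()  # cache miss: recompute
    if not os.path.isdir(cache_dir):
        os.makedirs(cache_dir)
    with open(fpath, 'wb') as file_:
        pickle.dump(result, file_)
    return result

# e.g. cached_compute('/tmp/bigcache', hash_uid([1, 2, 3]), lambda: [x ** 2 for x in range(10)])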
Example 2
def delete_suffixed_images(hs, back):
    remove_cands = []
    gx2_gname = hs.tables.gx2_gname

    # Check to see if the image is a copy of another
    for gx, gname in enumerate(gx2_gname):
        name, ext = splitext(gname)
        components = name.split('_')
        if len(components) == 2:
            orig_name, copynum = components
            orig_gname = orig_name + ext
            copyof = np.where(gx2_gname == orig_gname)[0]
            if len(copyof) > 0:
                remove_cands.append((gx, copyof))

    # Make sure the images are actually duplicates
    remove_gxs = []
    orphaned_cxs = []
    for copy_gx, orig_gx in remove_cands:
        if isinstance(orig_gx, np.ndarray):
            orig_gx = orig_gx[0]
        if np.all(hs.gx2_image(copy_gx) == hs.gx2_image(orig_gx)):
            print('[script] duplicate found copy_gx=%r, orig_gx=%r' % (copy_gx, orig_gx))
            remove_gxs.append(copy_gx)
            copy_cxs = hs.gx2_cxs(copy_gx)
            orphaned_cxs.append((copy_cxs, orig_gx))

    # THESE ACTUALLY MODIFY THE DATABASE

    # Move all chips to the original
    for cx_list, orig_gx in orphaned_cxs:
        for cx in cx_list:
            print('[script] relocate cx=%r to gx=%r' % (cx, orig_gx))
            hs.tables.cx2_gx[cx] = orig_gx

    # Move deleted images into the trash
    trash_dir = join(hs.dirs.db_dir, 'deleted-images')
    src_list = hs.gx2_gname(remove_gxs, full=True)
    dst_list = hs.gx2_gname(remove_gxs, prefix=trash_dir)
    helpers.ensuredir(trash_dir)

    move_list = zip(src_list, dst_list)
    mark_progress, end_prog = helpers.progress_func(len(move_list), lbl='Trashing Image')
    for count, (src, dst) in enumerate(move_list):
        shutil.move(src, dst)
        mark_progress(count)
    end_prog()

    for gx in remove_gxs:
        print('[script] remove gx=%r' % (gx,))
        hs.tables.gx2_gname[gx] = ''

    # Update and save
    hs.update_samples()
    back.populate_image_table()

    hs.save_database()
    return locals()
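
The suffix scan above treats a file like 'zebra_1.png' as a candidate copy of 'zebra.png' and only commits the deletion after a pixel-level comparison. Here is a standalone sketch of just the filename test (find_suffixed_copies and the sample names are invented for illustration):

from os.path import splitext

def find_suffixed_copies(gname_list):
    # Map each 'name_N.ext' entry to the index of its 'name.ext' original
    index_of = {gname: gx for gx, gname in enumerate(gname_list)}
    candidates = []
    for gx, gname in enumerate(gname_list):
        name, ext = splitext(gname)
        components = name.split('_')
        if len(components) == 2 and components[0] + ext in index_of:
            candidates.append((gx, index_of[components[0] + ext]))
    return candidates

# find_suffixed_copies(['zebra.png', 'zebra_1.png', 'lion.png']) -> [(1, 0)]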
Example 3
def cache_test_results(qx2_bestranks, hs, qreq, qcxs, dcxs):
    test_uid = qreq.get_query_uid(hs, qcxs)
    cache_dir = join(hs.dirs.cache_dir, 'experiment_harness_results')
    util.ensuredir(cache_dir)
    io_kwargs = {
        'dpath': cache_dir,
        'fname': 'test_results',
        'uid': test_uid,
        'ext': '.cPkl'
    }
    io.smart_save(qx2_bestranks, **io_kwargs)
Example 4
def extract_detector_negatives(hs, output_dir, batch_extract_kwargs):
    from itertools import product as iprod
    negreg_dir = join(output_dir, 'negatives', 'regions')
    negall_dir = join(output_dir, 'negatives', 'whole')
    negreg_fmt = join(negreg_dir, 'gx%d_wix%d_hix%d_neg.png')
    negall_fmt = join(negall_dir, 'gx%d_all_neg.png')
    helpers.ensuredir(negall_dir)
    helpers.ensuredir(negreg_dir)

    print('[train] extract_negatives')
    gx_list = hs.get_valid_gxs()
    nChips_list = np.array(hs.gx2_nChips(gx_list))
    aif_list = np.array(hs.gx2_aif(gx_list))

    # Find completely negative images: aif flag set but zero chips (no animals)
    is_completely_negative = np.logical_and(aif_list, nChips_list == 0)
    negall_gxs = gx_list[np.where(is_completely_negative)[0]]

    gfpath_list = []
    cfpath_list = []
    roi_list = []

    def add_neg_eg(roi, gfpath, cfpath):
        roi_list.append(roi)
        gfpath_list.append(gfpath)
        cfpath_list.append(cfpath)

    width_split = 2
    (uw, uh) = batch_extract_kwargs['uniform_size']

    for gx in negall_gxs:
        gfpath = hs.gx2_gname(gx, full=True)
        # Add whole negative image
        (gw, gh) = hs.gx2_image_size(gx)
        roi = (0, 0, gw, gh)
        add_neg_eg(roi, gfpath, negall_fmt % (gx,))
        # Add negative regions
        w_step = gw // width_split
        # float() guards against Python 2 integer division zeroing out h_step
        h_step = int(round(gh * (float(w_step) / gw)))
        nHeights, nWidths = gh // h_step, gw // w_step
        if nWidths < 2 or nHeights < 1:
            continue
        for wix, hix in iprod(xrange(nWidths), xrange(nHeights)):
            x, y = wix * w_step, hix * h_step
            w, h = w_step, h_step
            roi = (x, y, w, h)
            add_neg_eg(roi, gfpath, negreg_fmt % (gx, wix, hix))

    theta_list = [0] * len(roi_list)

    cc2.batch_extract_chips(gfpath_list, cfpath_list, roi_list, theta_list,
                            **batch_extract_kwargs)
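
The inner loop above carves each all-negative image into a grid of roughly square tiles. A self-contained sketch of that tiling arithmetic (tile_rois is a hypothetical name; the sizes in the trailing comment are made up):

from itertools import product

def tile_rois(gw, gh, width_split=2):
    # Split a (gw x gh) image into width_split columns of proportionate tiles
    w_step = gw // width_split
    h_step = int(round(gh * (float(w_step) / gw)))
    if w_step == 0 or h_step == 0:
        return []
    nWidths, nHeights = gw // w_step, gh // h_step
    return [(wix * w_step, hix * h_step, w_step, h_step)
            for wix, hix in product(range(nWidths), range(nHeights))]

# tile_rois(640, 480) -> [(0, 0, 320, 240), (0, 240, 320, 240),
#                         (320, 0, 320, 240), (320, 240, 320, 240)]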
Example 5
def backup_csv_tables(hs, force_backup=False):
    internal_dir = hs.dirs.internal_dir
    backup_dir = join(internal_dir, 'backup_v0.1.0')
    if not exists(backup_dir) or force_backup:
        helpers.ensuredir(backup_dir)
        timestamp = helpers.get_timestamp(use_second=True)

        def do_backup(fname):
            src = join(internal_dir, fname)
            dst_fname = ('%s_bak-' + timestamp + '%s') % splitext(fname)
            dst = join(backup_dir, dst_fname)
            if exists(src):
                shutil.copy(src, dst)
        do_backup(CHIP_TABLE_FNAME)
        do_backup(NAME_TABLE_FNAME)
        do_backup(IMAGE_TABLE_FNAME)
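
do_backup splices a timestamp between a file's stem and its extension by feeding splitext's (root, ext) pair into a two-slot format string. A quick standalone check of that trick (the timestamp value is a made-up example of what helpers.get_timestamp might return):

from os.path import splitext

timestamp = '2024-01-15_12-30-05'  # hypothetical helpers.get_timestamp() output
dst_fname = ('%s_bak-' + timestamp + '%s') % splitext('chip_table.csv')
print(dst_fname)  # chip_table_bak-2024-01-15_12-30-05.csv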
Example 6
def extract_detector_positives(hs, output_dir, batch_extract_kwargs):
    print('[train] extract_positives')
    cx_list    = hs.get_valid_cxs()
    gx_list    = hs.tables.cx2_gx[cx_list]
    cid_list   = hs.tables.cx2_cid[cx_list]
    theta_list = hs.tables.cx2_theta[cx_list]
    roi_list   = hs.tables.cx2_roi[cx_list]
    gfpath_list = hs.gx2_gname(gx_list, full=True)

    posoutput_dir = join(output_dir, 'positives')
    helpers.ensuredir(posoutput_dir)
    pos_fmt = join(posoutput_dir, 'cid%d_gx%d_pos.png')
    cfpath_list = [pos_fmt % (cid, gx) for (cid, gx) in zip(cid_list, gx_list)]

    cc2.batch_extract_chips(gfpath_list, cfpath_list, roi_list, theta_list,
                            **batch_extract_kwargs)
Example 7
def show_names(hs, qcx_list, fnum=1):
    '''The most recent plot names function, works with qcx_list'''
    print('[dev] show_names()')
    result_dir = hs.dirs.result_dir
    names_dir = join(result_dir, 'show_names')
    helpers.ensuredir(names_dir)
    # NEW:
    print(qcx_list)
    nx_list = np.unique(hs.tables.cx2_nx[qcx_list])
    print(nx_list)
    for nx in nx_list:
        viz.show_name(hs, nx, fnum=fnum)
        df2.save_figure(fpath=names_dir, usetitle=True)
    # OLD:
    #for (qcx) in qcx_list:
        #print('Showing q%s - %r' % (hs.cidstr(qcx, notes=True)))
        #notes = hs.cx2_property(qcx, 'Notes')
        #fnum = show_name(hs, qcx, fnum, subtitle=notes, annote=not hs.args.noannote)
        #if hs.args.save_figures:
            #df2.save_figure(fpath=names_dir, usetitle=True)
    return fnum
Example 8
def _delete_image(hs, gx_list):
    for gx in gx_list:
        cx_list = hs.gx2_cxs(gx)
        for cx in cx_list:
            hs.delete_chip(cx, resample=False)
        hs.tables.gx2_gname[gx] = ''

    trash_dir = join(hs.dirs.db_dir, 'deleted-images')
    src_list = hs.gx2_gname(gx_list, full=True)
    dst_list = hs.gx2_gname(gx_list, prefix=trash_dir)
    helpers.ensuredir(trash_dir)

    # Move deleted images into the trash
    move_list = zip(src_list, dst_list)
    mark_progress, end_progress = helpers.progress_func(len(move_list), lbl='Trashing Image')
    for count, (src, dst) in enumerate(move_list):
        shutil.move(src, dst)
        mark_progress(count)
    end_progress()
    hs.update_samples()
    hs.save_database()
Example 9
def export_subdatabase(hs, gx_list, new_dbdir):
    # New database dirs
    new_imgdir = join(new_dbdir, ld2.RDIR_IMG)
    new_internal = join(new_dbdir, ld2.RDIR_INTERNAL)
    print('[scripts] Exporting into %r' % new_dbdir)

    # Ensure new database
    helpers.ensuredir(new_dbdir)
    helpers.ensuredir(new_imgdir)
    helpers.ensuredir(new_internal)

    gname_list = hs.gx2_gname(gx_list)
    src_gname_list = hs.gx2_gname(gx_list, full=True)
    dst_gname_list = [join(new_imgdir, gname) for gname in gname_list]

    copy_list = list(zip(src_gname_list, dst_gname_list))

    mark_progress, end_prog = helpers.progress_func(len(copy_list), lbl='Copy Images')
    for count, (src, dst) in enumerate(copy_list):
        shutil.copy(src, dst)
        mark_progress(count)
    end_prog()

    cx_list = [cx for cxs in hs.gx2_cxs(gx_list) for cx in cxs.tolist()]
    nx_list = np.unique(hs.tables.cx2_nx[cx_list])

    image_table = ld2.make_image_csv2(hs, gx_list)
    chip_table  = ld2.make_chip_csv2(hs, cx_list)
    name_table  = ld2.make_name_csv2(hs, nx_list)
    # csv filenames
    chip_table_fpath  = join(new_internal, ld2.CHIP_TABLE_FNAME)
    name_table_fpath  = join(new_internal, ld2.NAME_TABLE_FNAME)
    image_table_fpath = join(new_internal, ld2.IMAGE_TABLE_FNAME)
    # write csv files
    helpers.write_to(chip_table_fpath, chip_table)
    helpers.write_to(name_table_fpath, name_table)
    helpers.write_to(image_table_fpath, image_table)
    return locals()
Example 10
import shutil
from os.path import expanduser, join  # needed for HOME and cache paths below

import matplotlib.image as image

### from HotSpotter
from hscom import helpers
from hscom import argparse2
from hscom import params
from hscom import fileio as io
from hscom import __common__
from hotspotter import HotSpotterAPI
(print, print_, print_on, print_off, rrr,
 profile) = __common__.init(__name__, '[helpers]')

HOME = expanduser('~')
GLOBAL_CACHE_DIR = join(HOME, '.hotspotter/global_cache')
helpers.ensuredir(GLOBAL_CACHE_DIR)

#%%
# =============================================================================
#  Initialization (user needs to modify the contents below)
# =============================================================================
### New database path
dpath = 'C:\\Users\\95316\\code1\\Snow leopard'
### Database name
new_db = 'left_diff_cats'
### Full path: dpath + new_db

### Whether to add a new database
Flag_new_db = False
### Whether to add new images
Flag_add_img = False
Example 11
def get_test_results(hs, qcx_list, qdat, cfgx=0, nCfg=1,
                     force_load=False):
    dcxs = hs.get_indexed_sample()
    query_uid = qdat.get_uid()
    print('[harn] get_test_results(): %r' % query_uid)
    hs_uid    = hs.get_db_name()
    qcxs_uid  = helpers.hashstr_arr(qcx_list)
    test_uid  = hs_uid + query_uid + qcxs_uid
    cache_dir = join(hs.dirs.cache_dir, 'experiment_harness_results')
    io_kwargs = dict(dpath=cache_dir, fname='test_results', uid=test_uid, ext='.cPkl')
    # High level caching
    qx2_bestranks = []
    #nChips = hs.get_num_chip()
    #nNames = len(hs.tables.nx2_name) - 2
    nQuery = len(qcx_list)
    #NMultiNames =
    nPrevQ = nQuery * cfgx
    qx2_reslist = []
    if not hs.args.nocache_query and (not force_load):
        test_results = io.smart_load(**io_kwargs)
        if test_results is None:
            pass
        elif len(test_results) != 1:
            print('recaching test_results')
        else:
            return test_results, [[{0: None}]] * nQuery
    for qx, qcx in enumerate(qcx_list):
        print(textwrap.dedent('''
        [harn]----------------
        [harn] TEST %d/%d
        [harn]----------------''' % (qx + nPrevQ + 1, nQuery * nCfg)))
        gt_cxs = hs.get_other_indexed_cxs(qcx)
        #title = 'q' + hs.cidstr(qcx) + ' - ' + notes
        #print('[harn] title=%r' % (title,))
        #print('[harn] gt_' + hs.cidstr(gt_cxs))
        res_list = mc3.execute_query_safe(hs, qdat, [qcx], dcxs)
        bestranks = []
        algos = []
        qx2_reslist += [res_list]
        assert len(res_list) == 1
        for qcx2_res in res_list:
            assert len(qcx2_res) == 1
            res = qcx2_res[qcx]
            algos += [res.title]
            gt_ranks = res.get_gt_ranks(gt_cxs)
            #print('[harn] cx_ranks(/%4r) = %r' % (nChips, gt_ranks))
            #print('[harn] cx_ranks(/%4r) = %r' % (NMultiNames, gt_ranks))
            #print('ns_ranks(/%4r) = %r' % (nNames, gt_ranks))
            if len(gt_ranks) == 0:
                _bestrank = -1
            else:
                _bestrank = min(gt_ranks)
            bestranks += [_bestrank]
        # record metadata
        qx2_bestranks += [bestranks]
    mat_vals = np.array(qx2_bestranks)
    test_results = (mat_vals,)
    # High level caching
    helpers.ensuredir(cache_dir)
    io.smart_save(test_results, **io_kwargs)
    return test_results, qx2_reslist
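
Example 11 reduces each query to the best (lowest) rank among its ground-truth chips, using -1 as a sentinel for queries with no ground truth. A minimal sketch of that reduction (best_rank is a hypothetical name; the rank values are invented):

def best_rank(gt_ranks):
    # Best (lowest) rank of any ground-truth chip; -1 if there is none
    return min(gt_ranks) if len(gt_ranks) > 0 else -1

# best_rank([3, 0, 7]) -> 0   (a ground-truth match ranked first)
# best_rank([])        -> -1  (no ground truth for this query)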