Example #1
def sequential_feat_load(feat_cfg, feat_fpath_list):
    kpts_list = []
    desc_list = []
    # Debug loading (seems to use lots of memory)
    print('\n')
    try:
        nFeats = len(feat_fpath_list)
        prog_label = '[fc2] Loading feature: '
        mark_progress, end_progress = helpers.progress_func(nFeats, prog_label)
        for count, feat_path in enumerate(feat_fpath_list):
            try:
                npz = np.load(feat_path, mmap_mode=None)
            except IOError:
                print('\n')
                helpers.checkpath(feat_path, verbose=True)
                print('IOError on feat_path=%r' % feat_path)
                raise
            kpts = npz['arr_0']
            desc = npz['arr_1']
            npz.close()
            kpts_list.append(kpts)
            desc_list.append(desc)
            mark_progress(count)
        end_progress()
        print('[fc2] Finished load of individual kpts and desc')
    except MemoryError:
        print('\n------------')
        print('[fc2] Out of memory')
        print('[fc2] Trying to read: %r' % feat_path)
        print('[fc2] len(kpts_list) = %d' % len(kpts_list))
        print('[fc2] len(desc_list) = %d' % len(desc_list))
        raise
    if feat_cfg.whiten:
        desc_list = whiten_features(desc_list)
    return kpts_list, desc_list
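
The loader in Example #1 reads keypoints and descriptors from the positional keys 'arr_0' and 'arr_1' of an .npz archive. For reference, here is a minimal sketch of a writer that produces files in that layout; the helper name save_feats and the array shapes are illustrative assumptions, not part of the original code. Positional arguments to np.savez are stored under 'arr_0', 'arr_1', ..., which is exactly what sequential_feat_load reads back.

import numpy as np

def save_feats(feat_path, kpts, desc):
    # Positional (unnamed) arrays are saved as 'arr_0', 'arr_1', ...
    np.savez(feat_path, kpts, desc)

# Round-trip check (shapes here are only an example)
kpts = np.zeros((3, 5), dtype=np.float32)
desc = np.zeros((3, 128), dtype=np.uint8)
save_feats('_example_feat.npz', kpts, desc)
npz = np.load('_example_feat.npz')
assert np.all(npz['arr_0'] == kpts) and np.all(npz['arr_1'] == desc)
npz.close()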
Example #2
def progress_func(maxval=0):
    mark_progress, end_progress = helpers.progress_func(maxval,
                                                        mark_after=MARK_AFTER,
                                                        progress_type='simple')
    #if maxval > MARK_AFTER:
    #print('')
    return mark_progress, end_progress
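
The actual helpers.progress_func implementation is not shown on this page. The sketch below is only a stand-in that matches the calling convention seen in these examples (a maxval, optional lbl / mark_after / flush_after / progress_type keywords, and a returned (mark_progress, end_progress) pair); treat its body as an assumption for illustration, not the real helper.

import sys

def progress_func_stub(maxval=0, lbl='Progress: ', mark_after=0,
                       flush_after=4, progress_type='simple', **kwargs):
    # Stand-in with the same contract: call mark_progress(count) inside the
    # loop and end_progress() once the loop finishes.
    def mark_progress(count):
        if maxval <= mark_after:
            return  # stay quiet for small jobs, mirroring mark_after
        sys.stdout.write('\r%s%d/%d' % (lbl, count + 1, maxval))
        if count % flush_after == 0:
            sys.stdout.flush()

    def end_progress():
        if maxval > mark_after:
            sys.stdout.write('\n')
        sys.stdout.flush()

    return mark_progress, end_progress

Usage then mirrors the loops in these examples: mark_progress(count) on each iteration, end_progress() afterwards.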
Example #3
def delete_suffixed_images(hs, back):
    remove_cands = []
    gx2_gname = hs.tables.gx2_gname

    # Check to see if the image is a copy of another
    for gx, gname in enumerate(gx2_gname):
        name, ext = splitext(gname)
        components = name.split('_')
        if len(components) == 2:
            orig_name, copynum = components
            orig_gname = orig_name + ext
            copyof = np.where(gx2_gname == orig_gname)[0]
            if len(copyof) > 0:
                remove_cands.append((gx, copyof))

    # Make sure the images are actually duplicates
    remove_gxs = []
    orphaned_cxs = []
    for copy_gx, orig_gx in remove_cands:
        if isinstance(orig_gx, np.ndarray):
            orig_gx = orig_gx[0]
        if np.all(hs.gx2_image(copy_gx) == hs.gx2_image(orig_gx)):
            print('[script] duplicate found copy_gx=%r, orig_gx=%r' % (copy_gx, orig_gx))
            remove_gxs.append(copy_gx)
            copy_cxs = hs.gx2_cxs(copy_gx)
            orphaned_cxs.append((copy_cxs, orig_gx))

    # THESE ACTUALLY MODIFY THE DATABASE

    # Move all chips to the original
    for cx_list, orig_gx in orphaned_cxs:
        for cx in cx_list:
            print('[script] relocate cx=%r to gx=%r' % (cx, orig_gx))
            hs.tables.cx2_gx[cx] = orig_gx

    # Move deleted images into the trash
    trash_dir = join(hs.dirs.db_dir, 'deleted-images')
    src_list = hs.gx2_gname(remove_gxs, full=True)
    dst_list = hs.gx2_gname(remove_gxs, prefix=trash_dir)
    helpers.ensuredir(trash_dir)

    move_list = zip(src_list, dst_list)
    mark_progress, end_prog = helpers.progress_func(len(move_list), lbl='Trashing Image')
    for count, (src, dst) in enumerate(move_list):
        shutil.move(src, dst)
        mark_progress(count)
    end_prog()

    for gx in remove_gxs:
        print('[script] remove gx=%r' % (gx,))
        hs.tables.gx2_gname[gx] = ''

    # Update and save
    hs.update_samples()
    back.populate_image_table()

    hs.save_database()
    return locals()
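
The copy detection in delete_suffixed_images relies on the filename convention 'origname_N.ext'. A standalone illustration of that parsing step (the function name parse_copy_candidate is hypothetical, added only for this example):

from os.path import splitext

def parse_copy_candidate(gname):
    # Return (orig_gname, copynum) when gname looks like 'origname_N.ext', else None.
    name, ext = splitext(gname)
    components = name.split('_')
    if len(components) == 2:
        orig_name, copynum = components
        return orig_name + ext, copynum
    return None

print(parse_copy_candidate('zebra_1.jpg'))  # ('zebra.jpg', '1')
print(parse_copy_candidate('zebra.jpg'))    # None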
Example #4
def make_feature_graph(qreq, qcx2_res, use_networkx=True):
    # Make a graph between the chips
    cxfx2_ax = {(cx, fx): ax for ax, (cx, fx) in get_cxfx_enum(qreq)}
    def w_edge(cx1, cx2, fx1, fx2, score, rank):
        ax1 = cxfx2_ax[(cx1, fx1)]
        ax2 = cxfx2_ax[(cx2, fx2)]
        attr_dict = {'score': score, 'rank': rank}
        return (ax1, ax2, attr_dict)
    nodes = [(ax, {'fx': fx, 'cx': cx}) for ax, (cx, fx) in get_cxfx_enum(qreq)]
    weighted_edges = [w_edge(cx1, cx2, fx1, fx2, score, rank)
                      for (cx1, res) in qcx2_res.iteritems()
                      for (cx2, (fx1, fx2), score, rank) in get_fmatch_iter(res)
                      if score > 0]
    if use_networkx:
        graph = netx.DiGraph()
        graph.add_nodes_from(nodes)
        graph.add_edges_from(weighted_edges)
    else:
        vx2_ax = cxfx2_ax.values()
        import graph_tool
        graph = graph_tool.Graph(g=None, directed=True, prune=False, vorder=None)
        vertex_list = graph.add_vertex(n=len(nodes))

        v_fx = graph.new_vertex_property("int")
        v_cx = graph.new_vertex_property("int")

        e_score = graph.new_edge_property("float")
        e_rank = graph.new_edge_property("int")

        for v, (ax, vprops) in zip(vertex_list, nodes):
            v_cx[v] = int(vprops['cx'])
            v_fx[v] = int(vprops['fx'])

        mark_prog, end_prog = util.progress_func(len(weighted_edges))
        count = 0
        for ax1, ax2, prop_dict in weighted_edges:
            mark_prog(count)
            count += 1
            vx1 = vx2_ax.index(ax1)
            vx2 = vx2_ax.index(ax2)
            v1 = graph.vertex(vx1)
            v2 = graph.vertex(vx2)
            e = graph.add_edge(v1, v2)
            e_score[e] = float(prop_dict['score'])
            e_rank[e] = int(prop_dict['rank'])
        mark_prog(count)
        end_prog()
        #import graph_tool.draw

        graph.save('test_graph.dot')
    return graph
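
Once make_feature_graph returns the networkx variant, the node attributes ('cx', 'fx') and edge attributes ('score', 'rank') set above can be queried directly. A small illustrative helper (the function name and threshold are assumptions, not part of the original code):

def strong_matches(graph, min_score=10.0):
    # Collect feature-to-feature edges whose match score exceeds a threshold.
    return [(ax1, ax2, attrs['score'])
            for ax1, ax2, attrs in graph.edges(data=True)
            if attrs.get('score', 0) > min_score]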
Example #5
def add_images(hs, fpath_list, move_images=True):
    nImages = len(fpath_list)
    print('[hs.add_imgs] adding %d images' % nImages)
    img_dir = hs.dirs.img_dir
    copy_list = []
    helpers.ensurepath(img_dir)
    if move_images:
        # Build lists of where the new images will be
        fpath_list2 = [
            join(img_dir,
                 split(fpath)[1]) for fpath in fpath_list
        ]
        copy_iter = izip(fpath_list, fpath_list2)
        copy_list = [(src, dst) for src, dst in copy_iter
                     if not exists(dst)]
        nExist = len(fpath_list2) - len(copy_list)
        print('[hs] copying %d images' % len(copy_list))
        print('[hs] %d images already exist' % nExist)
        # RCOS TODO: Copying like this should be a helper function.
        # It appears in multiple places
        # Also there should be the option of parallelization? IDK, these are
        # disk writes, but it still might help.
        mark_progress, end_progress = helpers.progress_func(
            len(copy_list), lbl='Copying Image')
        for count, (src, dst) in enumerate(copy_list):
            shutil.copy(src, dst)
            mark_progress(count)
        end_progress()
    else:
        print('[hs.add_imgs] using original image paths')
        fpath_list2 = fpath_list
    # Get location of the new images relative to the image dir
    gx2_gname = hs.tables.gx2_gname.tolist()
    gx2_aif = hs.tables.gx2_aif.tolist()
    relpath_list = [relpath(fpath, img_dir) for fpath in fpath_list2]
    current_gname_set = set(gx2_gname)
    # Check to make sure the gnames are not currently indexed
    new_gnames = [
        gname for gname in relpath_list if gname not in current_gname_set
    ]
    new_aifs = [False] * len(new_gnames)
    nNewImages = len(new_gnames)
    nIndexed = nImages - nNewImages
    print('[hs.add_imgs] new_gnames:\n' + '\n'.join(new_gnames))
    print('[hs.add_imgs] %d images already indexed.' % nIndexed)
    print('[hs.add_imgs] Added %d new images.' % nNewImages)
    # Append the new gnames to the hotspotter table
    hs.tables.gx2_gname = np.array(gx2_gname + new_gnames)
    hs.tables.gx2_aif = np.array(gx2_aif + new_aifs)
    hs.update_samples()
    return nNewImages
Example #6
def add_images(hs, fpath_list, move_images=True):
    nImages = len(fpath_list)
    print('[hs.add_imgs] adding %d images' % nImages)
    img_dir = hs.dirs.img_dir
    copy_list = []
    helpers.ensurepath(img_dir)
    if move_images:
        # Build lists of where the new images will be
        fpath_list2 = [join(img_dir, split(fpath)[1]) for fpath in fpath_list]
        copy_iter = izip(fpath_list, fpath_list2)
        copy_list = [(src, dst) for src, dst in copy_iter if not exists(dst)]
        nExist = len(fpath_list2) - len(copy_list)
        print('[hs] copying %d images' % len(copy_list))
        print('[hs] %d images already exist' % nExist)
        # RCOS TODO: Copying like this should be a helper function.
        # It appears in multiple places
        # Also there should be the option of parallelization? IDK, these are
        # disk writes, but it still might help.
        mark_progress, end_progress = helpers.progress_func(len(copy_list), lbl='Copying Image')
        for count, (src, dst) in enumerate(copy_list):
            shutil.copy(src, dst)
            mark_progress(count)
        end_progress()
    else:
        print('[hs.add_imgs] using original image paths')
        fpath_list2 = fpath_list
    # Get location of the new images relative to the image dir
    gx2_gname = hs.tables.gx2_gname.tolist()
    gx2_aif   = hs.tables.gx2_aif.tolist()
    relpath_list = [relpath(fpath, img_dir) for fpath in fpath_list2]
    current_gname_set = set(gx2_gname)
    # Check to make sure the gnames are not currently indexed
    new_gnames = [gname for gname in relpath_list if gname not in current_gname_set]
    new_aifs   = [False] * len(new_gnames)
    nNewImages = len(new_gnames)
    nIndexed = nImages - nNewImages
    print('[hs.add_imgs] new_gnames:\n' + '\n'.join(new_gnames))
    print('[hs.add_imgs] %d images already indexed.' % nIndexed)
    print('[hs.add_imgs] Added %d new images.' % nNewImages)
    # Append the new gnames to the hotspotter table
    hs.tables.gx2_gname = np.array(gx2_gname + new_gnames)
    hs.tables.gx2_aif   = np.array(gx2_aif   + new_aifs)
    hs.update_samples()
    return nNewImages
Example #7
def _delete_image(hs, gx_list):
    for gx in gx_list:
        cx_list = hs.gx2_cxs(gx)
        for cx in cx_list:
            hs.delete_chip(cx, resample=False)
        hs.tables.gx2_gname[gx] = ''

    trash_dir = join(hs.dirs.db_dir, 'deleted-images')
    src_list = hs.gx2_gname(gx_list, full=True)
    dst_list = hs.gx2_gname(gx_list, prefix=trash_dir)
    helpers.ensuredir(trash_dir)

    # Move deleted images into the trash
    move_list = zip(src_list, dst_list)
    mark_progress, end_progress = helpers.progress_func(len(move_list), lbl='Trashing Image')
    for count, (src, dst) in enumerate(move_list):
        shutil.move(src, dst)
        mark_progress(count)
    end_progress()
    hs.update_samples()
    hs.save_database()
Example #8
def score_chipmatch_coverage(hs, qcx, chipmatch, qreq, method=0):
    prescore_method = 'csum'
    nShortlist = 100
    dcxs_ = set(qreq._dcxs)
    (cx2_fm, cx2_fs, cx2_fk) = chipmatch
    cx2_prescore = mf.score_chipmatch(hs, qcx, chipmatch, prescore_method, qreq)
    topx2_cx = cx2_prescore.argsort()[::-1]
    # Only allow indexed cxs to be in the top results
    topx2_cx = [cx for cx in iter(topx2_cx) if cx in dcxs_]
    nRerank = min(len(topx2_cx), nShortlist)
    cx2_score = [0 for _ in xrange(len(cx2_fm))]
    mark_progress, end_progress = util.progress_func(nRerank, flush_after=10,
                                                     lbl='[cov] Compute coverage')
    for topx in xrange(nRerank):
        mark_progress(topx)
        cx2 = topx2_cx[topx]
        fm = cx2_fm[cx2]
        fs = cx2_fs[cx2]
        covscore = get_match_coverage_score(hs, qcx, cx2, fm, fs, method=method)
        cx2_score[cx2] = covscore
    end_progress()
    return cx2_score
Example #9
def _delete_image(hs, gx_list):
    for gx in gx_list:
        cx_list = hs.gx2_cxs(gx)
        for cx in cx_list:
            hs.delete_chip(cx, resample=False)
        hs.tables.gx2_gname[gx] = ''

    trash_dir = join(hs.dirs.db_dir, 'deleted-images')
    src_list = hs.gx2_gname(gx_list, full=True)
    dst_list = hs.gx2_gname(gx_list, prefix=trash_dir)
    helpers.ensuredir(trash_dir)

    # Move deleted images into the trash
    move_list = zip(src_list, dst_list)
    mark_progress, end_progress = helpers.progress_func(len(move_list),
                                                        lbl='Trashing Image')
    for count, (src, dst) in enumerate(move_list):
        shutil.move(src, dst)
        mark_progress(count)
    end_progress()
    hs.update_samples()
    hs.save_database()
Example #10
def export_subdatabase(hs, gx_list, new_dbdir):
    # New database dirs
    new_imgdir = join(new_dbdir, ld2.RDIR_IMG)
    new_internal = join(new_dbdir, ld2.RDIR_INTERNAL)
    print('[scripts] Exporting into %r' % new_dbdir)

    # Ensure new database
    helpers.ensuredir(new_dbdir)
    helpers.ensuredir(new_imgdir)
    helpers.ensuredir(new_internal)

    gname_list = hs.gx2_gname(gx_list)
    src_gname_list = hs.gx2_gname(gx_list, full=True)
    dst_gname_list = map(lambda gname: join(new_imgdir, gname), gname_list)

    copy_list = [(src, dst) for (src, dst) in zip(src_gname_list, dst_gname_list)]

    mark_progress, end_prog = helpers.progress_func(len(copy_list), lbl='Copy Images')
    for count, (src, dst) in enumerate(copy_list):
        shutil.copy(src, dst)
        mark_progress(count)
    end_prog()

    cx_list = [cx for cxs in hs.gx2_cxs(gx_list) for cx in cxs.tolist()]
    nx_list = np.unique(hs.tables.cx2_nx[cx_list])

    image_table = ld2.make_image_csv2(hs, gx_list)
    chip_table  = ld2.make_chip_csv2(hs, cx_list)
    name_table  = ld2.make_name_csv2(hs, nx_list)
    # csv filenames
    chip_table_fpath  = join(new_internal, ld2.CHIP_TABLE_FNAME)
    name_table_fpath  = join(new_internal, ld2.NAME_TABLE_FNAME)
    image_table_fpath = join(new_internal, ld2.IMAGE_TABLE_FNAME)
    # write csv files
    helpers.write_to(chip_table_fpath, chip_table)
    helpers.write_to(name_table_fpath, name_table)
    helpers.write_to(image_table_fpath, image_table)
    return locals()
Example #11
def draw_images_at_positions(img_list, pos_list):
    print('[encounter] drawing %d images' % len(img_list))
    # Thumb stack
    ax  = df2.gca()
    fig = df2.gcf()
    trans = ax.transData.transform
    trans2 = fig.transFigure.inverted().transform
    mark_progress, end_progress = util.progress_func(len(pos_list), lbl='drawing img')
    for ix, ((x, y), img) in enumerate(izip(pos_list, img_list)):
        mark_progress(ix)
        xx, yy = trans((x, y))  # data coords -> display (pixel) coords
        xa, ya = trans2((xx, yy))  # display coords -> figure coords
        #
        height, width = img.shape[0:2]  # numpy image shape is (rows, cols)
        tlx = xa - (width / 2.0)
        tly = ya - (height / 2.0)
        img_bbox = [tlx, tly, width, height]
        # Make new axis for the image
        img_ax = df2.plt.axes(img_bbox)
        img_ax.imshow(img)
        img_ax.set_aspect('equal')
        img_ax.axis('off')
    end_progress()
Example #12
def warp_srcimg_to_kpts(fx2_kp, srcimg, chip_shape, fx2_score=None, **kwargs):
    if len(fx2_kp) == 0:
        return None
    if fx2_score is None:
        fx2_score = np.ones(len(fx2_kp))
    scale_factor = kwargs.get('scale_factor', SCALE_FACTOR_DEFAULT)
    # Build destination image
    (h, w) = map(int, (chip_shape[0] * scale_factor, chip_shape[1] * scale_factor))
    dstimg = np.zeros((h, w), dtype=np.float32)
    dst_copy = dstimg.copy()
    src_shape = srcimg.shape
    # Build keypoint transforms
    fx2_M = build_transforms(fx2_kp, (h, w), src_shape, scale_factor)
    # cv2 warp flags
    dsize = (w, h)
    flags = cv2.INTER_LINEAR  # cv2.INTER_LANCZOS4
    borderMode = cv2.BORDER_CONSTANT
    # mark progress
    mark_progress, end_progress = util.progress_func(len(fx2_M),
                                                     flush_after=20,
                                                     mark_after=1000,
                                                     lbl='coverage warp ')
    # For each keypoint warp a gaussian scaled by the feature score
    # into the image
    count = 0
    for count, (M, score) in enumerate(izip(fx2_M, fx2_score)):
        mark_progress(count)
        warped = cv2.warpAffine(srcimg * score, M, dsize,
                                dst=dst_copy,
                                flags=flags, borderMode=borderMode,
                                borderValue=0).T
        catmat = np.dstack((warped.T, dstimg))
        dstimg = catmat.max(axis=2)
    mark_progress(count)
    end_progress()
    return dstimg
Example #13
def progress_func(maxval=0):
    mark_progress, end_progress = helpers.progress_func(maxval, mark_after=MARK_AFTER, progress_type='simple')
    #if maxval > MARK_AFTER:
        #print('')
    return mark_progress, end_progress