Code example #1
def main():
    """ main """

    # create one histogram-feature output dir per patch sub-directory
    patch_subd = os.listdir(PATCH_DIR)
    for pd in patch_subd:
        os.makedirs(FEAT_DIR + "hist_feats/" + pd, exist_ok=True)

    if args.choice == "feats":
        feat_ext()

    elif args.choice == "cluster":
        cluster_feats()

    elif args.choice == "hist":
        all_fids = read_simple_flist(ETC_D + "all.flist")

        print("File IDs:", len(all_fids))
        n_jobs = cpu_count() // 2
        chunks = chunkify(all_fids, n_jobs)

        pool = Pool(n_jobs)
        pool.map(parallel_sift_hist_feat_ext, chunks)
        pool.close()
        pool.join()

        # serial run over a single chunk, handy for debugging:
        # parallel_sift_hist_feat_ext(chunks[0])

    else:
        print("Invalid choice")
Code example #2
def main():
    """ main """

    # create one histogram-feature output dir per patch sub-directory
    patch_subd = os.listdir(PATCH_DIR)
    for pd in patch_subd:
        os.makedirs(FEAT_DIR + "hist_feats/" + pd, exist_ok=True)

    if args.choice == "feats":
        feat_ext()

    elif args.choice == "cluster":
        cluster_feats()

    elif args.choice == "hist":
        train_fids = read_simple_flist(FEAT_DIR + "train_fids.list")
        n_jobs = 4
        chunks = chunkify(train_fids, n_jobs)

        pool = Pool(n_jobs)
        pool.map(parallel_sift_hist_feat_ext, chunks)
        pool.close()
        pool.join()

        # parallel_sift_hist_feat_ext(chunks[0])

    elif args.choice == "valid":

        val_flist = sorted(read_simple_flist(LAB_DIR + "val.txt",
                                             pre=IMAGE_DIR, sfx=EXT))

        val_fids = []
        all_d = []
        file_kp_size = []

        st = 0
        for tf in val_flist:

            k, d = get_descriptors(tf)

            all_d.append(d)  # collect all descriptors in one list

            # get the file ID (unique key) of the image
            fid = os.path.splitext(os.path.basename(tf))[0]
            val_fids.append(fid)

            # save this image's keypoints, keyed by file ID
            with open(FEAT_DIR + "kp/kp_" + fid + ".pkl", "wb") as fpw:
                pickle.dump(k, fpw)

            # record the [start, end) span of this file's keypoints
            # within the concatenated descriptor list
            file_kp_size.append([st, st + len(k)])
            st += len(k)

    else:
        print("Invalid choice")
Code example #3
def main():
    """ main method """

    os.system("mkdir -p " + TREE_DIR)

    n_jobs = int(cpu_count() / 2)

    flist = read_simple_flist(ETC_D + "all.flist")

    chunks = chunkify(flist, n_jobs)

    pool = Pool(n_jobs)
    pool.map(parallel_tree_construct, chunks)
    pool.close()
    pool.join()
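
The close()/join() pattern above works; since Python 3.3, Pool can also be used as a context manager, which is slightly tidier. Note that the context manager calls terminate() on exit rather than close()/join(), which is safe here because map() has already collected every result. A behavior-compatible sketch of the same stage:

from multiprocessing import Pool, cpu_count

n_jobs = cpu_count() // 2
chunks = chunkify(read_simple_flist(ETC_D + "all.flist"), n_jobs)

# __exit__ calls terminate(); fine once map() has returned
with Pool(n_jobs) as pool:
    pool.map(parallel_tree_construct, chunks)

Pool.map also accepts a chunksize argument for batching dispatch; the manual chunkify is only needed because these workers expect a whole list of file IDs rather than a single item.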
Code example #4
def main():
    """ main method """

    # Extract features for each image (patch wise)
    im_files = sorted(os.listdir(IMAGE_DIR))
    os.system("mkdir -p " + FEAT_DIR + "color_feats/")

    patch_subd = os.listdir(PATCH_DIR)
    print(patch_subd)
    for pd in patch_subd:
        os.makedirs(FEAT_DIR + "color_feats/" + pd, exist_ok=True)

    n_jobs = 16
    chunks = chunkify(im_files, n_jobs)
    pool = Pool(n_jobs)

    pool.map(par_feat_ext, chunks)
    pool.close()
    pool.join()
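
par_feat_ext itself is not part of these excerpts; the calls above only imply its contract: Pool.map hands each worker one chunk, i.e. a list of image file names. A hypothetical body, using a per-channel color histogram purely as a placeholder feature (the real feature extraction is not shown):

import os
import pickle

import cv2  # assumption: OpenCV, since SIFT is used elsewhere in the pipeline


def par_feat_ext(chunk):
    """Hypothetical worker: one color-feature file per image in the chunk."""
    for im_name in chunk:
        img = cv2.imread(IMAGE_DIR + im_name)
        # placeholder feature: a 32-bin histogram per BGR channel
        feat = [cv2.calcHist([img], [c], None, [32], [0, 256])
                for c in range(3)]
        fid = os.path.splitext(im_name)[0]
        with open(FEAT_DIR + "color_feats/" + fid + ".pkl", "wb") as fpw:
            pickle.dump(feat, fpw)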
Code example #5
def main():
    """ main """

    global GP_LABEL

    etc_d = FEAT_DIR + "etc/"

    if args.choice == "annot":

        fid_obj, fid_box, obj_names = parse_annotations(etc_d)

    elif args.choice == "gt":

        fid_obj = pickle.load(open(etc_d + "fid_obj.pkl", "rb"))
        fid_box = pickle.load(open(etc_d + "fid_box.pkl", "rb"))

        GP_LABEL = get_gt_label_info(fid_box, fid_obj)
        from pprint import pprint
        pprint(GP_LABEL)
        with open(etc_d + "gp_label.pkl", "wb") as fpw:
            pickle.dump(GP_LABEL, fpw)

    elif args.choice == "prep":

        GP_LABEL = pickle.load(open(etc_d + "gp_label.pkl", "rb"))
        print("GP labels:", len(GP_LABEL))

        if not os.path.exists(etc_d + "train.flist"):
            print("* Generating train and test splits .....")
            gt_files = os.listdir(SEG_DIR)
            fids = [os.path.splitext(gtf)[0] for gtf in gt_files]
            shuffle(fids)

            train_size = int(len(fids) * 0.8)
            train_fids = fids[:train_size]
            test_fids = fids[train_size:]

            print("Trian, test, total:", len(train_fids), len(test_fids),
                  len(fids))

            save_fids(train_fids, etc_d + "train.flist")
            save_fids(test_fids, etc_d + "test.flist")
            save_fids(train_fids + test_fids, etc_d + "all.flist")

        else:

            train_fids = read_simple_flist(etc_d + "train.flist")
            test_fids = read_simple_flist(etc_d + "test.flist")

        info_dir = FEAT_DIR + "patch_label_info/"
        os.makedirs(info_dir, exist_ok=True)

        all_fids = train_fids + test_fids
        print("File IDs:", len(all_fids))

        n_jobs = cpu_count() // 2

        # on machines with many cores, use a fixed pool of 20 workers
        if n_jobs > 10:
            n_jobs = 20

        print("n_jobs:", n_jobs)

        chunks = chunkify(all_fids, n_jobs)

        pool = Pool(n_jobs)
        pool.map(parallel_data_prep_svm, chunks)
        pool.close()
        pool.join()

    elif args.choice == "map":

        os.system("mkdir -p " + train_d)

        info_files = os.listdir(info_d)
        print("Info files:", len(info_files))

        n_jobs = cpu_count() // 2
        chunks = chunkify(info_files, n_jobs)

        # parallel_feat_map(info_files[:2])

        p = Pool(n_jobs)
        p.map(parallel_feat_map, chunks)
        p.close()
        p.join()
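
Every main() above dispatches on a module-level args.choice that the excerpts never define. A hypothetical argparse block consistent with the branches seen here (the option names are inferred from the comparisons; everything else is an assumption):

import argparse

parser = argparse.ArgumentParser(description="feature-extraction pipeline")
parser.add_argument("choice",
                    choices=["feats", "cluster", "hist", "valid",
                             "annot", "gt", "prep", "map"],
                    help="which pipeline stage to run")
args = parser.parse_args()

if __name__ == "__main__":
    main()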