Example #1
def main():
    """ main """

    patch_subd = os.listdir(PATCH_DIR)
    for pd in patch_subd:
        os.system("mkdir -p " + FEAT_DIR + "hist_feats/" + pd)

    if args.choice == "feats":
        feat_ext()

    elif args.choice == "cluster":
        cluster_feats()

    elif args.choice == "hist":
        train_fids = read_simple_flist(FEAT_DIR + "train_fids.list")
        chunks = chunkify(train_fids, 4)

        pool = Pool(4)
        pool.map(parallel_sift_hist_feat_ext, chunks)
        pool.close()
        pool.join()

        # parallel_sift_hist_feat_ext(chunks[0])

    elif args.choice == "valid":

        val_flist = sorted(read_simple_flist(LAB_DIR + "val.txt",
                                             pre=IMAGE_DIR, sfx=EXT))

        val_fids = []
        all_d = []
        all_k = []
        file_kp_size = []

        st = 0
        for i, tf in enumerate(val_flist):

            k, d = get_descriptors(tf)

            # cv2.KeyPoint objects are not picklable, so keep only the
            # coordinates (mirrors the working variant in Example #8)
            all_k = [kp.pt for kp in k]
            all_d.append(d)  # append all descriptors into one list

            # get the file ID (unique key) of the image
            fid = os.path.splitext(os.path.basename(tf))[0]
            val_fids.append(fid)

            pickle.dump(all_k, open(FEAT_DIR + "kp/kp_" + fid + ".pkl", "wb"))

            # store the [start, end) keypoint index range for this file
            file_kp_size.append([st, st + len(k)])
            st += len(k)

    else:
        print("Invalid choice")
Example #2
def main():
    """ main method """

    train_fids = read_simple_flist(ETC_D + "train.flist")
    fid_obj = pickle.load(open(ETC_D + 'fid_obj.pkl', 'rb'))

    # for each image, get the patch files
    # load the patch level ground truth and the svm output
    # load the ground truth labels for each patch

    for fid in train_fids:
        # debug override: inspect a single file; remove to process the full list
        fid = '2007_000032'
        print(fid, fid_obj[fid])
        patch_gt = pickle.load(open(PATCH_LAB_DIR + fid + ".pkl", "rb"))
        tree = pickle.load(open(TREE_DIR + fid + ".pkl", "rb"))

        v01 = list(tree['01'].values())
        v12k = tree['12'].keys()

        tmp = {}
        for v0 in v01:
            try:
                tmp[v0] += 1
            except KeyError:
                tmp[v0] = 1

        print(len(v01), len(tmp), len(v12k))

        break  # debug: stop after the first file
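# Neither read_simple_flist() nor chunkify() is defined in these examples.
# From the call sites (optional pre/sfx path decoration; one chunk per Pool
# worker) they are presumably small helpers along these lines (a sketch):

def read_simple_flist(fname, pre="", sfx=""):
    """ Sketch: one entry per line, with optional path prefix/suffix """
    with open(fname) as fp:
        return [pre + line.strip() + sfx for line in fp if line.strip()]

def chunkify(lst, n):
    """ Sketch: split lst into n roughly equal chunks for Pool.map """
    return [lst[i::n] for i in range(n)]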
Example #3
def main():
    """ main """

    patch_subd = os.listdir(PATCH_DIR)
    for pd in patch_subd:
        os.system("mkdir -p " + FEAT_DIR + "hist_feats/" + pd)

    if args.choice == "feats":
        feat_ext()

    elif args.choice == "cluster":
        cluster_feats()

    elif args.choice == "hist":
        all_fids = read_simple_flist(ETC_D + "all.flist")

        print("File IDs:", len(all_fids))
        chunks = chunkify(all_fids, int(cpu_count() / 2))

        pool = Pool(int(cpu_count() / 2))
        pool.map(parallel_sift_hist_feat_ext, chunks)
        pool.close()
        pool.join()

        # parallel_sift_hist_feat_ext(chunks[0])

    else:
        print("Invalid choice")
Example #4
def parallel_sift_hist_feat_ext(lst):
    """ Parallel SIFT histogram feature extraction """

    hist_dir = FEAT_DIR + "hist_feats/"
    kp_index = np.load(FEAT_DIR + "kp_index.npy")
    c_ixs = np.load(FEAT_DIR + "sift_vq.npy")
    train_fids = read_simple_flist(ETC_D + "all_sift.flist")

    patch_subd = os.listdir(PATCH_DIR)
    for i, fid in enumerate(lst):

        ifile = IMAGE_DIR + fid + EXT
        f_ix = train_fids.index(fid)
        k_ix = kp_index[f_ix]
        f_c_ixs = c_ixs[k_ix[0]:k_ix[1]]

        print("\r{0:d}/{1:d}".format(i + 1, len(lst)), end="")

        for pd in patch_subd:
            pfile = PATCH_DIR + pd + "/" + fid + EXT
            # ifile already includes IMAGE_DIR (see above), so pass it directly
            p_hist_f, p_clr = compute_patch_sift_hist(ifile, pfile, f_c_ixs)

            np.save(hist_dir + pd + "/" + fid + ".npy", p_hist_f)

            pickle.dump(p_clr, open(hist_dir + pd + "/" + fid + ".pkl", "wb"))
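# compute_patch_sift_hist() is not shown either. Judging from the call site it
# maps each image's vector-quantised keypoints onto its patches and returns
# per-patch histogram features plus per-patch colour info. The sketch below
# rests on two assumptions: the patch file holds a per-pixel patch-ID map, and
# N_CLUSTERS matches the codebook size used for sift_vq.npy.
import os
import pickle
import numpy as np
import cv2

N_CLUSTERS = 1000  # placeholder; must match the codebook size

def compute_patch_sift_hist(ifile, pfile, f_c_ixs):
    """ Sketch: bag-of-visual-words histogram for every patch of one image """
    fid = os.path.splitext(os.path.basename(ifile))[0]
    kps = pickle.load(open(FEAT_DIR + "kp/kp_" + fid + ".pkl", "rb"))
    patch_map = cv2.imread(pfile, cv2.IMREAD_GRAYSCALE)  # per-pixel patch IDs
    img = cv2.imread(ifile)

    p_hist_f = {}
    for (x, y), c_ix in zip(kps, f_c_ixs):
        pid = int(patch_map[int(y), int(x)])
        p_hist_f.setdefault(pid, np.zeros(N_CLUSTERS))[c_ix] += 1

    # mean colour per patch as a stand-in for the real p_clr
    p_clr = {pid: img[patch_map == pid].mean(axis=0) for pid in p_hist_f}
    return p_hist_f, p_clr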
Example #5
def feat_ext():
    """ SIFT feature extraction """

    # pwd = os.path.dirname(os.path.realpath(__file__)) + "/"

    # tmp_d = pwd + "../tmp/"
    # etc_d = pwd + "../etc/"

    # os.system("mkdir -p " + etc_d)
    # os.system("mkdir -p " + tmp_d)

    # fid_labels = pickle.load(open(etc_d + "fid_labels.pkl", "rb"))
    # labels = read_simple_flist(etc_d + "labels.txt")

    # train_fids = read_simple_flist(lab_d + "train.txt")
    # val_fids = read_simple_flist(lab_d + "val.txt")

    train_flist = sorted(read_simple_flist(LAB_DIR + "train.txt",
                                           pre=IMAGE_DIR, sfx=EXT))

    # val_flist = sorted(read_simple_flist(LAB_DIR + "val.txt", pre=IMAGE_DIR,
    #                                      sfx=EXT))

    train_fids = []
    all_d = []
    all_k = []
    file_kp_size = []

    st = 0
    for i, tf in enumerate(train_flist):

        k, d = get_descriptors(tf)

        # cv2.KeyPoint objects are not picklable, so keep only the coordinates
        all_k = [kp.pt for kp in k]
        all_d.append(d)  # append all descriptors into one list

        # get the file ID (unique key) of the image
        fid = os.path.splitext(os.path.basename(tf))[0]
        train_fids.append(fid)

        pickle.dump(all_k, open(FEAT_DIR + "kp/kp_" + fid + ".pkl", "wb"))

        # store the [start, end) keypoint index range for this file
        file_kp_size.append([st, st + len(k)])
        st += len(k)

    all_d = np.concatenate(all_d)
    np.save(FEAT_DIR + "sift_train.npy", all_d)
    print('all desc:', all_d.shape, 'saved.')

    with open(FEAT_DIR + "train_fids.list", "w") as fpw:
        fpw.write("\n".join(train_fids))

    file_kp_size = np.asarray(file_kp_size)
    np.save(FEAT_DIR + "kp_index.npy", file_kp_size)
    print(file_kp_size.shape, 'saved.')

    print('Done')
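# Because kp_index.npy stores a [start, end) range per file, the descriptors
# of any single image can be sliced back out of the concatenated array. A
# short usage sketch (the file ID is the one hard-coded in Example #2):
import numpy as np

all_d = np.load(FEAT_DIR + "sift_train.npy")
kp_index = np.load(FEAT_DIR + "kp_index.npy")
train_fids = read_simple_flist(FEAT_DIR + "train_fids.list")

f_ix = train_fids.index("2007_000032")
st, en = kp_index[f_ix]
print(all_d[st:en].shape)  # descriptors belonging to that one image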
Example #6
def main():
    """ main method """

    os.system("mkdir -p " + TREE_DIR)

    n_jobs = int(cpu_count() / 2)

    flist = read_simple_flist(ETC_D + "all.flist")

    chunks = chunkify(flist, n_jobs)

    pool = Pool(n_jobs)
    pool.map(parallel_tree_construct, chunks)
    pool.close()
    pool.join()
Example #7
def main():
    """ main """

    global GP_LABEL

    etc_d = FEAT_DIR + "etc/"

    if args.choice == "annot":

        fid_obj, fid_box, obj_names = parse_annotations(etc_d)

    elif args.choice == "gt":

        fid_obj = pickle.load(open(etc_d + "fid_obj.pkl", "rb"))
        fid_box = pickle.load(open(etc_d + "fid_box.pkl", "rb"))

        GP_LABEL = get_gt_label_info(fid_box, fid_obj)
        from pprint import pprint
        pprint(GP_LABEL)
        pickle.dump(GP_LABEL, open(etc_d + "gp_label.pkl", "wb"))

    elif args.choice == "prep":

        GP_LABEL = pickle.load(open(etc_d + "gp_label.pkl", "rb"))
        print("GP labels:", len(GP_LABEL))

        if not os.path.exists(etc_d + "train.flist"):
            print("* Generating train and test splits .....")
            gt_files = os.listdir(SEG_DIR)
            fids = [gtf.split(".")[0] for gtf in gt_files]
            shuffle(fids)

            train_size = int(len(fids) * 0.8)
            train_fids = fids[:train_size]
            test_fids = fids[train_size:]

            print("Trian, test, total:", len(train_fids), len(test_fids),
                  len(fids))

            save_fids(train_fids, etc_d + "train.flist")
            save_fids(test_fids, etc_d + "test.flist")
            save_fids(train_fids + test_fids, etc_d + "all.flist")

        else:

            train_fids = read_simple_flist(etc_d + "train.flist")
            test_fids = read_simple_flist(etc_d + "test.flist")

        info_dir = FEAT_DIR + "patch_label_info/"
        os.makedirs(info_dir, exist_ok=True)

        all_fids = train_fids + test_fids
        print("File IDs:", len(all_fids))

        n_jobs = int(cpu_count() / 2)

        if n_jobs > 10:
            n_jobs = 20

        print('n_jobs:', n_jobs)

        chunks = chunkify(all_fids, n_jobs)

        pool = Pool(n_jobs)
        pool.map(parallel_data_prep_svm, chunks)
        pool.close()
        pool.join()

    elif args.choice == "map":

        os.system("mkdir -p " + train_d)

        info_files = os.listdir(info_d)
        print("Info files:", len(info_files))

        n_jobs = int(cpu_count() / 2)
        chunks = chunkify(info_files, n_jobs)

        # parallel_feat_map(info_files[:2])

        p = Pool(n_jobs)
        p.map(parallel_feat_map, chunks)
        p.close()
        p.join()
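# save_fids() is the write-side counterpart of read_simple_flist() and is not
# shown; presumably just:

def save_fids(fids, fname):
    """ Sketch: write one file ID per line """
    with open(fname, "w") as fp:
        fp.write("\n".join(fids) + "\n")

# parse_annotations() is not shown either. The file IDs (e.g. 2007_000032)
# look like PASCAL VOC, so a plausible sketch parses the VOC XML annotations;
# ANNOT_DIR, the pickle dumps, and the exact return shapes are assumptions:
import os
import pickle
import xml.etree.ElementTree as ET

def parse_annotations(etc_d, annot_dir="Annotations/"):
    """ Sketch: fid -> object names, fid -> bounding boxes from VOC XML """
    fid_obj, fid_box, obj_names = {}, {}, set()
    for xf in os.listdir(annot_dir):
        fid = os.path.splitext(xf)[0]
        root = ET.parse(annot_dir + xf).getroot()
        objs, boxes = [], []
        for obj in root.findall("object"):
            objs.append(obj.find("name").text)
            bb = obj.find("bndbox")
            boxes.append([int(float(bb.find(t).text))
                          for t in ("xmin", "ymin", "xmax", "ymax")])
        obj_names.update(objs)
        fid_obj[fid], fid_box[fid] = objs, boxes
    pickle.dump(fid_obj, open(etc_d + "fid_obj.pkl", "wb"))
    pickle.dump(fid_box, open(etc_d + "fid_box.pkl", "wb"))
    return fid_obj, fid_box, sorted(obj_names)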
Example #8
def feat_ext():
    """ SIFT feature extraction """

    # pwd = os.path.dirname(os.path.realpath(__file__)) + "/"

    # tmp_d = pwd + "../tmp/"
    # etc_d = pwd + "../etc/"

    # os.system("mkdir -p " + etc_d)
    # os.system("mkdir -p " + tmp_d)

    # fid_labels = pickle.load(open(etc_d + "fid_labels.pkl", "rb"))
    # labels = read_simple_flist(etc_d + "labels.txt")

    train_flist = sorted(
        read_simple_flist(ETC_D + "all_sift.flist", pre=IMAGE_DIR, sfx=EXT))

    print("Files:", len(train_flist))
    os.system("mkdir -p " + FEAT_DIR + "kp/")
    train_fids = []
    all_k = []
    all_d = []
    file_kp_size = []

    st = 0
    for i, tf in enumerate(train_flist):

        if not tf.endswith(EXT):
            continue

        fid = os.path.splitext(os.path.basename(tf))[0]
        kp_file = FEAT_DIR + "kp/kp_" + fid + ".pkl"

        print("\r{0:d}/{1:d}".format(i + 1, len(train_flist)), tf, end="")

        k, d = get_descriptors(tf)

        all_k = [kp.pt for kp in k]  # keep coordinates only; KeyPoint is not picklable
        all_d.append(d)  # append all descriptors into one list

        # print(len(k), d.shape)

        # get the file ID (unique key) of the image
        train_fids.append(fid)

        pickle.dump(all_k, open(kp_file, "wb"))

        # import sys
        # sys.exit()

        # store the [start, end) keypoint index range for this file
        file_kp_size.append([st, st + len(k)])
        st += len(k)

    all_d = np.concatenate(all_d)
    np.save(FEAT_DIR + "sift_train.npy", all_d)
    print('all desc:', all_d.shape, 'saved.')

    # with open(FEAT_DIR + "train_fids.list", "w") as fpw:
    #    fpw.write("\n".join(train_fids))

    file_kp_size = np.asarray(file_kp_size)
    np.save(FEAT_DIR + "kp_index.npy", file_kp_size)
    print(file_kp_size.shape, 'saved.')

    print('Done')
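# Quick check that the per-file keypoint pickles written above load back as
# plain coordinate tuples (again using the file ID hard-coded in Example #2):
import pickle

pts = pickle.load(open(FEAT_DIR + "kp/kp_2007_000032.pkl", "rb"))
print(len(pts), "keypoint coordinates, e.g.", pts[0])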