Example #1
import os

import cv2
import numpy as np

# Helpers such as get_fullpath_list, load_calib, load_h5, save_h5 and
# roatat_image are assumed to be defined elsewhere in this project.


def main(data_base_dir, data_list):

    for data in data_list:
        print("processing {}".format(data))
        data_dir = os.path.join(data_base_dir, data, "set_100")
        image_dir = os.path.join(data_dir, "images")
        depth_dir = os.path.join(data_dir, "depth_maps")
        calib_dir = os.path.join(data_dir, "calibration")
        calib_list = get_fullpath_list(data_dir, "calibration")
        calib_dict = load_calib(calib_list)
        for key, calib in calib_dict.items():
            r = np.asarray(calib["R"])
            t = np.asarray(calib["T"])
            k = np.asarray(calib["K"])

            # get calibration file path
            calib_file_name = "calibration_" + key + ".h5"
            calib_path = os.path.join(calib_dir, calib_file_name)

            # get image file path
            image_file_name = key + ".jpg"
            image_path = os.path.join(image_dir, image_file_name)

            # get depth map file path
            depth_file_name = key + ".h5"
            depth_path = os.path.join(depth_dir, depth_file_name)

            # load depth map and image
            image = cv2.imread(image_path, cv2.IMREAD_UNCHANGED)
            try:
                depth_dict = load_h5(depth_path)
            except Exception:
                # Fall back to an all-zero depth map if the file is
                # missing or cannot be read
                depth_dict = {
                    "depth": np.zeros((image.shape[0], image.shape[1])),
                    "min_distance": np.zeros((image.shape[0], image.shape[1]))
                }
            depth_map = depth_dict["depth"]

            r, t, k, image, depth_map, changed_flag = roatat_image(
                r, t, k, image, depth_map)

            calib["T"] = t
            calib["R"] = r
            calib["K"] = k
            if changed_flag:
                # save calibration file
                save_h5(calib, calib_path)

                # save rotated image
                result = cv2.imwrite(os.path.join(image_dir,
                                                  key + ".jpg"), image,
                                     [int(cv2.IMWRITE_JPEG_QUALITY), 100])

                # save depth map
                depth_dict["depth"] = depth_map
                save_h5(depth_dict, depth_path)
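
The function above relies on the project helpers load_h5 and save_h5 to read and write the calibration and depth-map files. As a rough, hypothetical sketch only (the real helpers may differ), a minimal h5py-based version could look like this:

import h5py
import numpy as np


def load_h5(path):
    """Load every dataset of an HDF5 file into a plain dict (sketch)."""
    with h5py.File(path, "r") as f:
        return {key: f[key][()] for key in f.keys()}


def save_h5(dict_to_save, path):
    """Write every entry of a dict as an HDF5 dataset (sketch)."""
    with h5py.File(path, "w") as f:
        for key, value in dict_to_save.items():
            f.create_dataset(key, data=np.asarray(value))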
Example #2
def write_visibility_files(set_loc, set_xx_key, data_loc, set_xx_idx):
    vis_list = get_fullpath_list(data_loc, "visibility")
    vis_dir = os.path.join(set_loc, 'visibility')
    if not os.path.exists(vis_dir):
        os.makedirs(vis_dir)

    # For each key in `set_xx`, look up its visibility values and
    # write them out in the order given by `set_xx_idx`
    vis = load_vis(vis_list)
    for idx_key, name_key in zip(set_xx_idx, set_xx_key):
        vis_key = vis[idx_key]

        vis_file_name = 'vis_' + name_key + '.txt'

        # For each image, generate a visibility file storing the number
        # of matches taken from the pairwise information:
        # -1 for the same image, 0 when there are no matches
        # First build the output file path
        file_name = os.path.join(vis_dir, vis_file_name)

        # Open file and fill contents
        with open(file_name, 'w') as f:
            for key2 in set_xx_idx:
                f.write(str(vis_key[key2]) + '\n')
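
The resulting vis_<key>.txt files contain one value per line, in the order given by set_xx_idx: the number of matches with each other image, -1 for the image itself, and 0 when there are no matches. A small, hypothetical sketch of reading such a file back (the helper name and path below are illustrative):

import numpy as np


def read_visibility_file(path):
    """Read a vis_<key>.txt file into a 1-D array of match counts (sketch)."""
    with open(path, "r") as f:
        return np.array([float(line) for line in f if line.strip()])


# Illustrative usage:
# vis = read_visibility_file("set_100/visibility/vis_some_key.txt")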
Example #3
def write_visibility_files(set_loc, set_xx_key, data_loc, set_xx_idx):
    vis_list = get_fullpath_list(data_loc, "visibility")
Example #4
import os
import random

from tqdm import tqdm

# Helpers such as get_fullpath_list, compute_image_pairs, gen_pair_dict_full,
# gen_triplets, get_set_100, get_10bag, images_idx2images_key and the various
# write_*/copy_* functions are assumed to come from elsewhere in this project.


def gen_subset(data_loc_master, data_list, max_num_pairs):
    for d in data_list:
        print('Working on {}'.format(d))
        print('Create set_100')
        data_loc = os.path.join(data_loc_master, d, 'all')
        set_loc = os.path.join(data_loc_master, d, 'set_100_new')
        if not os.path.exists(set_loc):
            os.makedirs(set_loc)

        # load image pairs
        vis_th = 100
        vis_list = get_fullpath_list(data_loc, "visibility")
        images_list = get_fullpath_list(data_loc, "images")
        image_pairs = compute_image_pairs(vis_list, len(images_list), vis_th)

        # get number of images
        num_images = len(images_list)

        # Optionally subsample image pairs to keep their number under
        # max_num_pairs (currently disabled)
        #image_pairs = random.sample(image_pairs, min(max_num_pairs, len(image_pairs)))

        # Generate pairs dict
        keys_list, pairs_dict = gen_pair_dict_full(image_pairs)

        # Generate triplets
        #triplets = gen_triplets(keys_list, pairs_dict)
        #print('Triplets (Before) =', len(triplets))
        # Run a while loop on triplets to sample 100 images
        set_size = min(100, num_images)
        set_100_idx = get_set_100(pairs_dict)
        set_100_idx = sorted(set_100_idx)
        set_100_key = images_idx2images_key(set_100_idx, images_list)
        write_images_txt(set_loc, set_100_key, data_loc)
        write_depth_maps_txt(set_loc, set_100_key, data_loc)
        write_calibration_txt(set_loc, set_100_key, data_loc)
        write_visibility_txt(set_loc, set_100_key, data_loc)

        copy_images(set_loc, set_100_key, data_loc)
        copy_depth_maps(set_loc, set_100_key, data_loc)
        copy_calibration_files(set_loc, set_100_key, data_loc)
        write_visibility_files(set_loc, set_100_key, data_loc, set_100_idx)
        write_new_vis_pairs(set_loc, set_100_key, data_loc)

        # Now compute new set of triplet from set_100 - Reduces search space
        print('Gen new triplets')
        vis_list = get_fullpath_list(set_loc, "visibility")
        images_list = get_fullpath_list(set_loc, "images")
        image_pairs = compute_image_pairs(vis_list, len(images_list), vis_th)

        # Generate pairs dict
        keys_list, pairs_dict = gen_pair_dict_full(image_pairs)

        # Generate triplets
        triplets = gen_triplets(keys_list, pairs_dict)
        print('Triplets (After) =', len(triplets))

        # 3bag gen
        print("Generating bag 3")
        selected_triplets = []
        for idx in tqdm(range(100)):
            if not os.path.exists(set_loc):
                os.makedirs(set_loc)
            while True:
                current_sample = random.choice(triplets)
                # Skip triplets that were already selected
                if current_sample not in selected_triplets:
                    selected_triplets.append(current_sample)
                    current_sample = sorted(current_sample)
                    write_subset_images_txt(
                        set_loc,
                        images_idx2images_key(current_sample, images_list),
                        data_loc, idx, '3bag')
                    break
        print("Generating bag 5")
        selected_5bags = []
        trial = 0
        for idx in tqdm(range(100)):
            if not os.path.exists(set_loc):
                os.makedirs(set_loc)
            while True:
                # Start from a random triplet
                current_sample = random.choice(triplets)
                # Draw a second triplet and collect the images it would add
                temp_sample = random.choice(triplets)
                counter = 0
                values_to_append = []
                for i in temp_sample:
                    if i not in current_sample:
                        values_to_append.append(i)
                        counter = counter + 1
                # Exactly two new images turn the triplet into a 5-image bag
                if counter == 2:
                    current_sample = current_sample + values_to_append
                    current_sample.sort()
                    current_sample = current_sample[::-1]
                    trial += 1
                    if trial > 500000:
                        break
                    if current_sample not in selected_5bags:
                        selected_5bags.append(current_sample)
                        current_sample = sorted(current_sample)
                        write_subset_images_txt(
                            set_loc,
                            images_idx2images_key(current_sample, images_list),
                            data_loc, idx, '5bag')
                        break

        trial = 0
        print("Generating bag 10")
        selected_10bags = []
        for idx in tqdm(range(100)):
            while True:
                # Draw a candidate 10-image bag
                current_sample = get_10bag(triplets)
                trial += 1
                if trial > 500000:
                    break
                if current_sample not in selected_10bags:
                    selected_10bags.append(current_sample)
                    current_sample = sorted(current_sample)
                    write_subset_images_txt(
                        set_loc,
                        images_idx2images_key(current_sample, images_list),
                        data_loc, idx, '10bag')
                    break
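
The 3-bag, 5-bag and 10-bag sampling above all build on the triplets returned by gen_triplets, whose implementation is not shown here. As an illustration only, and under the assumption that pairs_dict maps each image index to the set of indices it is paired with (this layout is not confirmed by the code above), a triplet enumeration could look like this:

from itertools import combinations


def gen_triplets_sketch(keys_list, pairs_dict):
    """Enumerate triplets in which every pair of images is co-visible (sketch)."""
    triplets = []
    for i, j, k in combinations(sorted(keys_list), 3):
        # Keep the triplet only if all three pairwise links exist
        if (j in pairs_dict.get(i, ()) and
                k in pairs_dict.get(i, ()) and
                k in pairs_dict.get(j, ())):
            triplets.append([i, j, k])
    return triplets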