    def __init__(self, dataset_root, proposals_path, output_root, scene,
                 sampling_params, image_params, match_params, plot_params):
        super(TripletGeneratorNode, self).__init__(dataset_root=dataset_root,
                                                   proposals_path=proposals_path,
                                                   output_root=output_root)
        SLURM_SUFFIX = datetime.now().strftime("%d-%m-%Y_%H_%M_%S")

        try:
            SLURM_SUFFIX = str(os.environ["SLURM_JOB_ID"])
        except KeyError:
            print('SLURM job id not available')

        self.scene = scene
        self.sampling_params = sampling_params
        self.image_params = image_params
        self.match_params = match_params
        self.plot_params = plot_params
        self.output_scene_path = os.path.join(self.output_root, scene, SLURM_SUFFIX)
        self.triplet_save_path = os.path.join(self.output_scene_path, 'triplets')
        self.plot_save_path = os.path.join(self.output_scene_path, 'plots')

        if self.sampling_params['same_test_scene']:
            raise NotImplementedError(
                'Need to fix code if same scene used for training and testing')

        # SET PATHS
        self.scene_path = os.path.join(dataset_root, scene)
        self.scene_img_path = os.path.join(self.scene_path, 'jpg_rgb')
        self.scene_depth_path = os.path.join(self.scene_path, 'high_res_depth')

        # LOAD ANNOTATIONS, CAMERA EXTRINSICS and CAMERA INTRINSICS
        self.camera_intrinsics = load_camera_params(dataset_root=dataset_root, scene=scene)
        # TODO: Add scaled parameters if bbox is scaled
        self.cx_prop = self.camera_intrinsics['cx']
        self.cy_prop = self.camera_intrinsics['cy']
        self.fx_prop = self.camera_intrinsics['fx']
        self.fy_prop = self.camera_intrinsics['fy']

        with open(os.path.join(self.scene_path, 'annotations.json')) as f:
            self.annotation = json.load(f)
        self.image_struct, self.scale = load_image_struct(self.scene_path)

        # MAKE FOLDERS FOR SAVING (refuse to overwrite non-empty directories)
        for save_path in (self.plot_save_path, self.triplet_save_path):
            if os.path.exists(save_path):
                if len(os.listdir(save_path)) != 0:
                    raise Exception(
                        f'Path {save_path} already exists and is not empty!')
            else:
                os.makedirs(save_path)

        # LOGGING
        self.logger = logging.getLogger(__name__)
        self.start_time = datetime.now().strftime("%Y-%m-%d--%H:%M:%S")
        self.log_path = self.output_scene_path

        if not os.path.exists(self.log_path):
            os.mkdir(self.log_path)

        logging.basicConfig(
            filename=os.path.join(self.log_path, f'triplet_gen_{self.scene}_{self.start_time}.log'),
            filemode='w')
        self.logger.setLevel(logging.DEBUG)

        with open(os.path.join(self.output_scene_path, 'time.txt'), 'a') as f:
            f.write("START TIME: " + datetime.now().strftime("%m-%d-%Y_%H:%M:%S") + '\n')

        with open(os.path.join(self.output_scene_path, f'params_{self.scene}.txt'), 'w') as f:
            _params = {
                'SAMPLING_PARAMS': self.sampling_params,
                'IMAGE_PARAMS': self.image_params,
                'MATCH_PARAMS': self.match_params,
                'PLOT_PARAMS': self.plot_params
            }

            json.dump(_params, f)
            del _params

        # Initial clustering of places/nodes in dataset
        self.cluster_centers, self.cluster_nodes = distance_sort_nodes(
            image_struct=self.image_struct,
            scale=self.scale, near_threshold=25,
            visualize_nodes=False)
        self.ref_views = None
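
A minimal instantiation sketch for the constructor above (the paths, scene name, and parameter-dict contents are placeholders, not values taken from this project):

# Hypothetical usage; all argument values below are placeholders.
generator = TripletGeneratorNode(
    dataset_root='/data/ActiveVisionDataset',
    proposals_path='/data/proposals',
    output_root='/data/triplet_output',
    scene='Home_001_1',
    sampling_params={'same_test_scene': False},
    image_params={},
    match_params={},
    plot_params={})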

Example 2
    def __init__(self, dataset_root, proposals_root, triplet_root, scene,
                 proposals_per_img=None, hierarchy=None,
                 sample_per_hierarchy=None, proposals_neighbor_img=None,
                 neg_bbox_iou_thresh=None, test_split=0.3):

        # NOTE: `camera` and `pixel_bbox_assoc` are assumed to be module-level
        # config dicts defined elsewhere in the project.
        camera_intrinsics = camera['camera_intrinsics']

        if proposals_per_img is None:
            proposals_per_img = pixel_bbox_assoc['proposals_per_img']
        if hierarchy is None:
            hierarchy = pixel_bbox_assoc['hierarchy']
        if sample_per_hierarchy is None:
            sample_per_hierarchy = pixel_bbox_assoc['sample_per_hierarchy']
        if proposals_neighbor_img is None:
            proposals_neighbor_img = pixel_bbox_assoc['proposals_neighbor_img']
        if neg_bbox_iou_thresh is None:
            neg_bbox_iou_thresh = pixel_bbox_assoc['neg_bbox_iou_thresh']

        self.dataset_root = dataset_root
        self.proposals_root = proposals_root
        self.triplet_root = triplet_root
        self.scene = scene
        self.camera_intrinsics = camera_intrinsics
        self.proposals_per_img = proposals_per_img
        self.hierarchy = hierarchy
        self.sample_per_hierarchy = sample_per_hierarchy
        self.proposals_neighbor_img = proposals_neighbor_img
        self.neg_bbox_iou_thresh = neg_bbox_iou_thresh
        self.test_split = test_split

        self.image_struct_dict = dict()  # k=folder, v={'image_struct', 'scale'}

        # print(f'Loading Image Parameters and Annotations...')

        # Load parameters and annotations for each folder in dataset
        folder_path = os.path.join(dataset_root, self.scene)
        img_st, scale = load_image_struct(folder_path)
        self.image_struct_dict[scene] = {
            'image_struct': img_st,
            'scale': scale
        }

        # Get neighbors of each proposal img
        # print(f'Creating Neighbors list...')
        self.cluster_centers, self.clusters = distance_sort_nodes(
            image_struct=self.image_struct_dict[self.scene]['image_struct'],
            scale=self.image_struct_dict[self.scene]['scale'],
            near_threshold=25,
            visualize_nodes=True)
        num_train = round(len(self.cluster_centers) * (1 - test_split))
        self.train_nodes = self.clusters[:num_train] / scale
        self.test_nodes = self.clusters[num_train:] / scale
        self.train_centers = self.cluster_centers[:num_train] / scale
        self.test_centers = self.cluster_centers[num_train:] / scale

        self.train_anchor_views = []
        self.test_anchor_views = []

        # img_st (loaded above) is self.image_struct_dict[self.scene]['image_struct']
        for nodes in self.train_nodes:
            node_views = get_image_from_nodes(nodes=nodes, image_struct=img_st)
            # TODO: Make this a parameter
            sample_size = min(5, len(node_views))
            self.train_anchor_views.append(
                np.random.choice(node_views, size=sample_size, replace=False))

        for nodes in self.test_nodes:
            node_views = get_image_from_nodes(nodes=nodes, image_struct=img_st)
            # TODO: Make this a parameter
            sample_size = min(5, len(node_views))
            self.test_anchor_views.append(
                np.random.choice(node_views, size=sample_size, replace=False))
Example 3
def get_bfs_nodes(root_path=None, folder=None, image_struct=None, scale=None,
                  near_threshold=25, visualize_nodes=False, test_split=0.3):

    if folder is not None:
        assert root_path is not None, 'Root path must be provided with folder!!!'
        if image_struct is not None:
            warnings.warn('Image_struct parameter is not being used!')
        image_struct, scale = load_image_struct(os.path.join(root_path, folder))
    elif image_struct is None or scale is None:
        raise ValueError(
            'Both folder and image_struct/scale parameters cannot be None!!!')

    all_nodes = get_all_world_pos(image_struct=image_struct)
    all_nodes_xy = all_nodes[:, [0, 2]].copy()
    all_nodes_xy = all_nodes_xy*scale

    clustering = DBSCAN(eps=near_threshold, min_samples=1)
    labels = clustering.fit_predict(all_nodes_xy)

    # Get unique sorted labels
    labels_sorted = np.unique(labels)
    labels_sorted.sort()
    print(f'Clusters = {len(labels_sorted)}')
    num_train_nodes = round(len(labels_sorted)*(1-test_split))
    print(f'Num train nodes = {num_train_nodes}')
    nearest_cluster_centers = []

    # Find node in each cluster nearest to cluster mean
    for label in labels_sorted:
        cluster_points = all_nodes_xy[labels == label]
        cluster_mean = np.mean(cluster_points, axis=0)
        distance_from_cluster_mean = np.linalg.norm(cluster_points-cluster_mean,
                                                    axis=1)
        nearest_cluster_centers.append(cluster_points[
                                           distance_from_cluster_mean.argmin()])
    nearest_cluster_centers = np.array(nearest_cluster_centers)

    if visualize_nodes:
        plt.scatter(nearest_cluster_centers[:, 0], nearest_cluster_centers[:, 1])
        plt.axis('equal')
        plt.show()

    # Populate queue with first node
    remaining_center_nodes = nearest_cluster_centers.copy().tolist()
    start_idx = np.random.randint(len(labels_sorted))
    queue = [remaining_center_nodes[start_idx]]
    processed_idx = [start_idx]
    bfs_nodes = []

    while len(queue) != 0 and len(bfs_nodes) < num_train_nodes:

        current_node = queue.pop(0)
        bfs_nodes.append(current_node)
        # remaining_center_nodes.remove(current_node)
        print(f'Calculating for {current_node}')

        difference = np.array(remaining_center_nodes) - current_node
        distance = np.linalg.norm(difference, axis=1)
        # Normalize by the nearest-neighbor distance (index 1, since index 0 is the node itself)
        normalized_distance = distance/sorted(distance)[1]

        # TODO: Make this a parameter
        next_hierarchy_indices = np.where(normalized_distance <= 1.3)[0]
        plt.scatter(nearest_cluster_centers[:, 0],
                    nearest_cluster_centers[:, 1])

        plt.scatter(current_node[0], current_node[1], edgecolors='black',
                    c='y', label='current')

        print('Neighbors are:')
        for idx in next_hierarchy_indices:
            next_node = remaining_center_nodes[idx]
            print(next_node)
            if idx not in processed_idx:
                plt.scatter(next_node[0], next_node[1], edgecolors='pink',
                            c='brown', label='neighbor')
                if remaining_center_nodes[idx] not in queue:
                    queue.append(remaining_center_nodes[idx])
                processed_idx.append(idx)

        # bfs_nodes[-1] is the current node, so plot only previously visited nodes
        for coord in bfs_nodes[1:-1]:
            plt.scatter(coord[0], coord[1], edgecolors='blue', c='white')

        # Starting node
        if len(bfs_nodes) > 1:
            plt.scatter(bfs_nodes[0][0], bfs_nodes[0][1], edgecolors='r', c='r')

        plt.axis('equal')
        plt.legend()
        plt.show()

    return bfs_nodes
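
A minimal usage sketch for get_bfs_nodes (the dataset path and scene folder are placeholders); either let it load the image struct from a scene folder, or pass image_struct and scale directly:

# Hypothetical usage; the dataset path and scene folder are placeholders.
bfs_nodes = get_bfs_nodes(root_path='/data/ActiveVisionDataset',
                          folder='Home_001_1',
                          near_threshold=25,
                          test_split=0.3)

# Equivalent call with a pre-loaded image struct and scale.
img_struct, scale = load_image_struct('/data/ActiveVisionDataset/Home_001_1')
bfs_nodes = get_bfs_nodes(image_struct=img_struct, scale=scale)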
Example 4
def distance_sort_nodes(root_path=None, folder=None, image_struct=None,
                        scale=None, near_threshold=25, visualize_nodes=False):

    if folder is not None:
        assert root_path is not None, 'Root path must be provided with folder!!!'
        if image_struct is not None:
            warnings.warn('Image_struct parameter is not being used!')
        image_struct, scale = load_image_struct(os.path.join(root_path, folder))
    elif image_struct is None or scale is None:
        raise ValueError(
            'Both folder and image_struct/scale parameters cannot be None!!!')

    all_nodes = get_all_world_pos(image_struct=image_struct)
    all_nodes_xy = all_nodes[:, [0, 2]].copy()
    all_nodes_xy = all_nodes_xy*scale

    clustering = DBSCAN(eps=near_threshold, min_samples=1)
    labels = clustering.fit_predict(all_nodes_xy)

    # Get unique sorted labels
    labels_sorted = np.unique(labels)
    labels_sorted.sort()
    print(f'Clusters = {len(labels_sorted)}')

    nearest_cluster_centers = []

    # Find node in each cluster nearest to cluster mean
    for label in labels_sorted:
        cluster_points = all_nodes_xy[labels == label]
        cluster_mean = np.mean(cluster_points, axis=0)
        distance_from_cluster_mean = np.linalg.norm(cluster_points-cluster_mean,
                                                    axis=1)
        nearest_cluster_centers.append(cluster_points[
                                           distance_from_cluster_mean.argmin()])
    nearest_cluster_centers = np.array(nearest_cluster_centers)

    start_idx = np.random.randint(len(nearest_cluster_centers))
    start_node = nearest_cluster_centers[start_idx]
    distance_from_start_node = np.linalg.norm(nearest_cluster_centers-start_node,
                                              axis=1)
    sorted_idx = distance_from_start_node.argsort()
    sorted_cluster_centers = nearest_cluster_centers[sorted_idx]
    sorted_clusters = []

    for idx in sorted_idx:
        cluster_label = labels_sorted[idx]
        cluster_node_indices = np.where(labels==cluster_label)
        cluster_nodes = all_nodes_xy[cluster_node_indices]
        sorted_clusters.append(cluster_nodes)

    if visualize_nodes:
        plt.scatter(nearest_cluster_centers[:, 0], nearest_cluster_centers[:, 1])

        plt.plot(start_node[0], start_node[1], 'ro', label='Starting Node')

        for i in range(len(sorted_cluster_centers)):
            plt.scatter(sorted_cluster_centers[i][0], sorted_cluster_centers[i][1],
                        edgecolors='b', c='white')
        plt.title('Center nodes of the cluster')
        plt.axis('equal')
        plt.legend()
        plt.show()

    return sorted_cluster_centers/scale, np.array(sorted_clusters)/scale
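
A minimal usage sketch for distance_sort_nodes (placeholder path and scene folder), mirroring how the constructors above split the returned clusters into train and test places:

# Hypothetical usage; the dataset path and scene folder are placeholders.
img_struct, scale = load_image_struct('/data/ActiveVisionDataset/Home_001_1')
centers, clusters = distance_sort_nodes(image_struct=img_struct, scale=scale,
                                        near_threshold=25, visualize_nodes=False)

test_split = 0.3
num_train = round(len(centers) * (1 - test_split))
train_clusters, test_clusters = clusters[:num_train], clusters[num_train:]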

Example 5
    def __init__(self, dataset_path, proposals_path, triplet_save_path, scene,
                 camera_intrinsics, sampling_params, image_params,
                 match_params, plot_params):
        super(TripletGeneratorNode,
              self).__init__(dataset_path=dataset_path,
                             proposals_path=proposals_path,
                             triplet_save_path=triplet_save_path)

        self.scene = scene
        self.camera_intrinsics = camera_intrinsics
        self.sampling_params = sampling_params
        self.image_params = image_params
        self.match_params = match_params
        self.plot_params = plot_params

        # SET PATHS
        self.scene_path = os.path.join(dataset_path, scene)
        self.img_folder_path = os.path.join(self.scene_path, 'jpg_rgb')
        self.depth_folder_path = os.path.join(self.scene_path,
                                              'high_res_depth')

        # LOAD ANNOTATIONS AND MATLAB FILES
        with open(os.path.join(self.scene_path, 'annotations.json')) as f:
            self.annotation = json.load(f)
        self.image_struct, self.scale = load_image_struct(self.scene_path)

        # RESCALE TO PROPOSAL IMAGE SIZE
        x_scale = self.image_params['x_scale_org_to_proposal']
        y_scale = self.image_params['y_scale_org_to_proposal']
        self.cx_prop = self.camera_intrinsics['cx'] * x_scale
        self.cy_prop = self.camera_intrinsics['cy'] * y_scale
        self.fx_prop = self.camera_intrinsics['fx'] * x_scale
        self.fy_prop = self.camera_intrinsics['fy'] * y_scale

        # MAKE FOLDERS FOR SAVING
        triplet_train_test_folder = ['Train', 'Test']

        for set_folder in triplet_train_test_folder:
            triplet_set_path = os.path.join(self.triplet_save_path, set_folder)

            if os.path.exists(triplet_set_path):
                if len(os.listdir(triplet_set_path)) != 0:
                    raise Exception(
                        f'Path {triplet_set_path} already exists and is not empty!')
            else:
                os.mkdir(triplet_set_path)
        self.plot_save_path = os.path.join(self.triplet_save_path, 'plots')

        if os.path.exists(self.plot_save_path):
            if len(os.listdir(self.plot_save_path)) != 0:
                raise Exception(
                    f'Path {self.plot_save_path} already exists and is not empty!')
        else:
            os.mkdir(self.plot_save_path)

        # LOGGING
        self.logger = logging.getLogger(__name__)
        self.start_time = datetime.now().strftime("%Y-%m-%d--%H:%M:%S")
        os.makedirs('logs', exist_ok=True)  # make sure the log directory exists
        logging.basicConfig(
            filename=os.path.join('logs', f'triplet_gen_{self.start_time}.log'),
            filemode='w')
        self.logger.setLevel(logging.DEBUG)
Example 6
    def __init__(self, dataset_root, proposals_root, triplet_root,
                 proposals_per_img=None, hierarchy=None,
                 sample_per_hierarchy=None, proposals_neighbor_img=None,
                 neg_bbox_iou_thresh=None):

        # NOTE: `camera` and `pixel_bbox_assoc` are assumed to be module-level
        # config dicts defined elsewhere in the project.
        camera_intrinsics = camera['camera_intrinsics']

        if proposals_per_img is None:
            proposals_per_img = pixel_bbox_assoc['proposals_per_img']
        if hierarchy is None:
            hierarchy = pixel_bbox_assoc['hierarchy']
        if sample_per_hierarchy is None:
            sample_per_hierarchy = pixel_bbox_assoc['sample_per_hierarchy']
        if proposals_neighbor_img is None:
            proposals_neighbor_img = pixel_bbox_assoc['proposals_neighbor_img']
        if neg_bbox_iou_thresh is None:
            neg_bbox_iou_thresh = pixel_bbox_assoc['neg_bbox_iou_thresh']

        self.dataset_root = dataset_root
        self.proposals_root = proposals_root
        self.triplet_root = triplet_root
        self.camera_intrinsics = camera_intrinsics
        self.proposals_per_img = proposals_per_img
        self.hierarchy = hierarchy
        self.sample_per_hierarchy = sample_per_hierarchy
        self.proposals_neighbor_img = proposals_neighbor_img
        self.neg_bbox_iou_thresh = neg_bbox_iou_thresh

        self.image_struct_dict = dict()  # k=folder, v={'image_struct', 'scale'}
        self.avd_annotation = dict()
        self.image_folder_map = dict()   # k='img_name', v='img_folder'
        self.proposal_filenames = []
        self.proposal_img_filenames = []
        self.neighbors_dict = dict()

        # print(f'Loading Image Parameters and Annotations...')

        # Load image folder map from coco annotation for both train and test
        for j in ['instances_set_1_train.json', 'instances_set_1_test.json']:
            with open(os.path.join(dataset_root, 'coco_annotations', j)) as f:
                self.image_folder_map.update(json.load(f)['img_folder_map'])

        # Load parameters and annotations for each folder in dataset
        for folder in os.listdir(dataset_root):
            folder_path = os.path.join(dataset_root, folder)

            # NOTE: `scene_list` is assumed to be a module-level tuple of scene-name prefixes.
            if os.path.isdir(folder_path) and folder.startswith(scene_list):
                img_st, scale = load_image_struct(folder_path)
                self.image_struct_dict[folder] = {'image_struct': img_st,
                                                  'scale': scale}
                ann_path = os.path.join(folder_path, 'annotations.json')
                with open(ann_path, 'r') as f:
                    self.avd_annotation.update(json.load(f))

        # Get neighbors of each proposal img
        # print(f'Creating Neighbors list...')

        for i in os.listdir(proposals_root):

            if i.endswith('.pt'):
                self.proposal_filenames.append(i)
                pt_img_file = self.convert_pt_jpg(i)
                self.proposal_img_filenames.append(pt_img_file)
                self.neighbors_dict[pt_img_file] = \
                    get_hierarchy_neighbors(pt_img_file,
                                            ann=self.avd_annotation,
                                            hierarchy=self.hierarchy)
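
The loop above assumes a convert_pt_jpg helper that maps a proposal file name back to the RGB image it was computed from. A minimal sketch of such a helper (the extension-swap naming convention is an assumption, not taken from this code) could be:

    # Hypothetical helper: '000110000010101.pt' -> '000110000010101.jpg'.
    # Assumes proposal files are named after their source image.
    def convert_pt_jpg(self, pt_filename):
        stem, _ = os.path.splitext(pt_filename)  # strip the '.pt' extension
        return stem + '.jpg'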