Example 1
    def get_fragment(self, idx):

        # Each match record stores the fragment paths and the precomputed
        # correspondence indices between the two point clouds.
        match = np.load(osp.join(self.path_match, "matches{:06d}.npy".format(idx)), allow_pickle=True).item()
        data_source = torch.load(match["path_source"]).to(torch.float)
        data_target = torch.load(match["path_target"]).to(torch.float)
        new_pair = torch.from_numpy(match["pair"])

        if self.transform is not None:
            data_source = self.transform(data_source)
            data_target = self.transform(data_target)

        # Multiscale data needs a dedicated pair container.
        if hasattr(data_source, "multiscale"):
            batch = MultiScalePair.make_pair(data_source, data_target)
        else:
            batch = Pair.make_pair(data_source, data_target)
        if self.is_online_matching:
            # Recompute correspondences on the (possibly transformed) clouds.
            new_match = compute_overlap_and_matches(
                Data(pos=data_source.pos), Data(pos=data_target.pos), self.max_dist_overlap
            )
            batch.pair_ind = torch.from_numpy(new_match["pair"].copy())
        else:
            # Remap the precomputed correspondences onto the transformed clouds.
            pair = tracked_matches(data_source, data_target, new_pair)
            batch.pair_ind = pair

        # Keep at most num_pos_pairs correspondences, sampled at random.
        num_pos_pairs = min(len(batch.pair_ind), self.num_pos_pairs)
        rand_ind = torch.randperm(len(batch.pair_ind))[:num_pos_pairs]
        batch.pair_ind = batch.pair_ind[rand_ind]
        batch.size_pair_ind = torch.tensor([num_pos_pairs])
        return batch.contiguous()
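Every variant of get_fragment and _compute_matches_between_fragments below delegates the correspondence search to compute_overlap_and_matches. The examples only show its call sites, so the following is a hypothetical sketch of the underlying idea, a nearest-neighbour test thresholded at max_dist_overlap, with a made-up function name and a simplified return dict:

    import torch

    def overlap_and_matches_sketch(pos_s, pos_t, max_dist):
        # Illustrative only: pos_s [N, 3] and pos_t [M, 3] point clouds.
        dist = torch.cdist(pos_s, pos_t)                      # [N, M] pairwise distances
        min_dist, nn_ind = dist.min(dim=1)                    # nearest target per source point
        mask = min_dist < max_dist                            # sources with a close target
        pair = torch.stack([torch.nonzero(mask).squeeze(1),   # [[source_idx, target_idx], ...]
                            nn_ind[mask]], dim=1)
        overlap = [mask.float().mean().item(),                # matched fraction of each cloud
                   (dist.min(dim=0).values < max_dist).float().mean().item()]
        return {"pair": pair.numpy(), "overlap": overlap}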
Example 2
    def get_fragment(self, idx):

        data_source, data_target, new_pair = self.get_raw_pair(idx)
        if self.transform is not None:
            data_source = self.transform(data_source)
            data_target = self.transform(data_target)

        # Multiscale data needs a dedicated pair container.
        if hasattr(data_source, "multiscale"):
            batch = MultiScalePair.make_pair(data_source, data_target)
        else:
            batch = Pair.make_pair(data_source, data_target)
        if self.is_online_matching:
            # Recompute correspondences on the (possibly transformed) clouds.
            new_match = compute_overlap_and_matches(Data(pos=data_source.pos),
                                                    Data(pos=data_target.pos),
                                                    self.max_dist_overlap)
            batch.pair_ind = torch.from_numpy(new_match["pair"].copy())
        else:
            pair = tracked_matches(data_source, data_target, new_pair)
            batch.pair_ind = pair

        # Keep at most num_pos_pairs correspondences.
        num_pos_pairs = min(len(batch.pair_ind), self.num_pos_pairs)

        if not self.use_fps or num_pos_pairs == len(batch.pair_ind):
            # Random subsampling when FPS is disabled or nothing is dropped.
            rand_ind = torch.randperm(len(batch.pair_ind))[:num_pos_pairs]
        else:
            # Farthest-point sampling spreads the kept correspondences in space.
            rand_ind = fps_sampling(batch.pair_ind, batch.pos, num_pos_pairs)
        batch.pair_ind = batch.pair_ind[rand_ind]
        batch.size_pair_ind = torch.tensor([num_pos_pairs])
        if len(batch.pair_ind) == 0:
            print("Warning: no correspondences for fragment {}".format(idx))
        return batch.contiguous()
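The fps_sampling branch is the main difference between Example 2 and Example 1. Its body is not shown, so here is a minimal farthest-point-sampling sketch, under the assumptions that the first column of pair_ind indexes the source points in pos and that the function returns indices into pair_ind:

    import torch

    def fps_sampling_sketch(pair_ind, pos, num_samples):
        # Greedily pick correspondences whose source points are far from the
        # ones already kept, so the selection covers the cloud evenly.
        pts = pos[pair_ind[:, 0]]                     # source point of each correspondence
        selected = torch.zeros(num_samples, dtype=torch.long)
        dist = torch.full((len(pts),), float("inf"))
        for k in range(1, num_samples):
            d = torch.norm(pts - pts[selected[k - 1]], dim=1)
            dist = torch.minimum(dist, d)             # distance to the nearest kept point
            selected[k] = torch.argmax(dist)          # take the farthest remaining point
        return selected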
Example 3
    def _compute_matches_between_fragments(self):
        ind = 0
        out_dir = osp.join(self.processed_dir, "test", "matches")
        if files_exist([out_dir]):  # pragma: no cover
            return
        makedirs(out_dir)

        list_scene = os.listdir(osp.join(self.raw_dir, "test"))
        for scene in list_scene:
            # Skip plain files; only scene directories are processed.
            if osp.isfile(osp.join(self.raw_dir, "test", scene)):
                continue
            path_log = osp.join(self.raw_dir, "test", scene + "_global.txt")
            list_pair = BasePCRBTest.parse_pair_files(path_log)
            for pair in list_pair:
                path1 = osp.join(
                    self.processed_dir, "test", 'fragment', scene,
                    'fragment_{:06d}.pt'.format(find_int(pair["source_name"])))
                path2 = osp.join(
                    self.processed_dir, "test", 'fragment', scene,
                    'fragment_{:06d}.pt'.format(find_int(pair["target_name"])))
                data1 = torch.load(path1)
                data2 = torch.load(path2)
                match = compute_overlap_and_matches(data1, data2,
                                                    self.max_dist_overlap)
                # Store provenance alongside the computed correspondences.
                match['path_source'] = path1
                match['path_target'] = path2
                match['name_source'] = pair["source_name"]
                match['name_target'] = pair["target_name"]
                match['scene'] = scene
                match['trans'] = pair["trans"]
                out_path = osp.join(self.processed_dir, "test", 'matches',
                                    'matches{:06d}.npy'.format(ind))
                np.save(out_path, match)
                ind += 1
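The matches{:06d}.npy files written here are the records that get_fragment in Example 1 loads back. np.save pickles a plain dict into a 0-d object array, which is why the reading side needs allow_pickle=True followed by .item() to unwrap it:

    import numpy as np

    match = {"pair": np.zeros((10, 2), dtype=np.int64), "scene": "scene_0"}
    np.save("matches000000.npy", match)                      # dict gets pickled

    loaded = np.load("matches000000.npy", allow_pickle=True).item()  # unwrap the dict
    assert loaded["scene"] == "scene_0"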
Example 4
    def _compute_matches_between_fragments(self):
        ind = 0
        out_dir = osp.join(self.processed_dir, "test", "matches")
        if files_exist([out_dir]):  # pragma: no cover
            return
        makedirs(out_dir)

        list_scene = os.listdir(osp.join(self.raw_dir, "test"))
        for scene in list_scene:
            # gt.log lists the evaluated fragment pairs and their ground-truth poses.
            path_log = osp.join(self.raw_dir, "test", scene, "gt.log")
            list_pair_num, list_mat = read_gt_log(path_log)
            for i, pair in enumerate(list_pair_num):
                path1 = osp.join(self.processed_dir, "test", 'fragment', scene,
                                 'fragment_{:06d}.pt'.format(pair[0]))
                path2 = osp.join(self.processed_dir, "test", 'fragment', scene,
                                 'fragment_{:06d}.pt'.format(pair[1]))
                data1 = torch.load(path1)
                data2 = torch.load(path2)
                # The inverse ground-truth transform aligns the clouds before matching.
                match = compute_overlap_and_matches(
                    data1,
                    data2,
                    self.max_dist_overlap,
                    trans_gt=torch.from_numpy(np.linalg.inv(list_mat[i])).to(
                        data1.pos.dtype))
                match['path_source'] = path1
                match['path_target'] = path2
                match['name_source'] = str(pair[0])
                match['name_target'] = str(pair[1])
                match['scene'] = scene
                out_path = osp.join(self.processed_dir, "test", 'matches',
                                    'matches{:06d}.npy'.format(ind))
                np.save(out_path, match)
                ind += 1
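Unlike Example 3, this variant feeds the inverted ground-truth pose from gt.log into the matcher so that correspondences are computed on aligned clouds. Applying a 4x4 homogeneous transform to an [N, 3] cloud looks like this (a sketch of what compute_overlap_and_matches presumably does with trans_gt; the helper name is made up):

    import torch

    def apply_trans_sketch(pos, trans):
        # pos: [N, 3] points, trans: [4, 4] homogeneous transform.
        # Rotate with the upper-left 3x3 block, then translate.
        return pos @ trans[:3, :3].T + trans[:3, 3]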
Example 5
    def get_fragment(self, idx):

        # Load the precomputed match record and the two stored fragments.
        match = np.load(osp.join(self.path_match,
                                 'matches{:06d}.npy'.format(idx)),
                        allow_pickle=True).item()
        data_source = torch.load(match['path_source'])
        data_target = torch.load(match['path_target'])
        # new_pair = compute_subsampled_matches(data_source, data_target, self.voxel_size_search, self.max_dist_overlap)
        new_pair = torch.from_numpy(match['pair'])

        if self.transform is not None:
            data_source = self.transform(data_source)
            data_target = self.transform(data_target)

        batch = Pair.make_pair(data_source, data_target)
        if self.is_online_matching:
            # Recompute correspondences on the (possibly transformed) clouds.
            new_match = compute_overlap_and_matches(Data(pos=data_source.pos),
                                                    Data(pos=data_target.pos),
                                                    self.max_dist_overlap)
            batch.pair_ind = torch.from_numpy(new_match['pair'].copy())
        else:
            pair = tracked_matches(data_source, data_target, new_pair)
            batch.pair_ind = pair

        # Keep at most num_pos_pairs correspondences, sampled at random.
        num_pos_pairs = min(len(batch.pair_ind), self.num_pos_pairs)
        rand_ind = torch.randperm(len(batch.pair_ind))[:num_pos_pairs]
        batch.pair_ind = batch.pair_ind[rand_ind]
        batch.size_pair_ind = torch.tensor([num_pos_pairs])
        return batch.contiguous().to(torch.float)
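tracked_matches appears in every get_fragment variant but is never shown. The precomputed pair indices refer to the raw clouds, so after transforms that drop or reorder points they must be remapped. A hypothetical sketch, assuming each transformed cloud keeps an origin_id attribute recording the raw index of every surviving point (the attribute name is an assumption):

    import torch

    def _remap(origin_id, raw_ind):
        # Lookup table from raw point index to post-transform index (-1 = removed).
        size = int(max(origin_id.max(), raw_ind.max())) + 1
        lut = torch.full((size,), -1, dtype=torch.long)
        lut[origin_id] = torch.arange(len(origin_id))
        return lut[raw_ind]

    def tracked_matches_sketch(data_s, data_t, raw_pair):
        pair = torch.stack([_remap(data_s.origin_id, raw_pair[:, 0]),
                            _remap(data_t.origin_id, raw_pair[:, 1])], dim=1)
        return pair[(pair >= 0).all(dim=1)]  # drop pairs with a removed endpoint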
Example 6
    def _compute_matches_between_fragments(self, mod):

        out_dir = osp.join(self.processed_dir, mod, 'matches')
        if files_exist([out_dir]):  # pragma: no cover
            return
        makedirs(out_dir)

        ind = 0  # global counter so match files are not overwritten across sequences
        for scene_path in os.listdir(osp.join(self.raw_dir, mod)):
            list_seq = sorted([
                f for f in os.listdir(osp.join(self.raw_dir, mod, scene_path))
                if 'seq' in f
            ])
            for seq in list_seq:
                log.info("%s, %s", scene_path, seq)
                fragment_dir = osp.join(self.processed_dir, mod, 'fragment',
                                        scene_path, seq)
                list_fragment_path = sorted([
                    osp.join(fragment_dir, f) for f in os.listdir(fragment_dir)
                    if 'fragment' in f
                ])
                log.info("compute_overlap_and_matches")
                for path1 in list_fragment_path:
                    for path2 in list_fragment_path:
                        # Lexicographic ordering visits each unordered pair once.
                        if path1 < path2:
                            out_path = osp.join(
                                out_dir, 'matches{:06d}.npy'.format(ind))

                            match = compute_overlap_and_matches(
                                path1, path2, self.max_dist_overlap)
                            if self.verbose:
                                log.info("%s %s overlap=%s",
                                         match['path_source'],
                                         match['path_target'],
                                         match['overlap'])
                            # Keep only pairs whose overlap falls inside the target range.
                            overlap = np.max(match['overlap'])
                            if self.min_overlap_ratio < overlap < self.max_overlap_ratio:
                                np.save(out_path, match)
                                ind += 1
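The nested loop guarded by path1 < path2 enumerates each unordered pair of fragments exactly once (the list is sorted, so the comparison never double-counts). itertools expresses the same iteration directly:

    from itertools import combinations

    for path1, path2 in combinations(list_fragment_path, 2):
        ...  # identical body: compute, filter by overlap, save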
Example 7
    def _compute_matches_between_fragments(self, mod):
        out_dir = osp.join(self.processed_dir, mod, 'matches')
        if files_exist([out_dir]):  # pragma: no cover
            return
        makedirs(out_dir)
        ind = 0
        list_drive = self.dict_seq[mod]
        for drive in list_drive:
            path_fragment = osp.join(self.processed_dir, mod, "fragment",
                                     "{:02d}".format(drive))
            list_name_frames = sorted(
                [f for f in os.listdir(path_fragment) if "pt" in f])

            # Precompute the candidate frame pairs.
            log.info("Compute the pairs")
            if self.min_dist is not None:
                # Only pair frames that are at least min_dist apart.
                pair_time_frame = compute_spaced_time_frame(
                    list_name_frames, path_fragment, self.min_dist)
            else:
                # Otherwise pair every frame with its near-future frames.
                pair_time_frame = [
                    (i, j) for i in range(len(list_name_frames))
                    for j in range(len(list_name_frames))
                    if 0 < (j - i) < self.max_time_distance
                ]
            log.info("Compute the matches")
            for i, j in pair_time_frame:
                out_path = osp.join(out_dir, 'matches{:06d}.npy'.format(ind))
                path1 = osp.join(path_fragment, list_name_frames[i])
                path2 = osp.join(path_fragment, list_name_frames[j])
                data1 = torch.load(path1)
                data2 = torch.load(path2)
                match = compute_overlap_and_matches(data1, data2,
                                                    self.max_dist_overlap)
                match['path_source'] = path1
                match['path_target'] = path2
                match['name_source'] = i
                match['name_target'] = j
                match['scene'] = drive
                np.save(out_path, match)
                ind += 1
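compute_spaced_time_frame is only called, never shown. One plausible reading, given the min_dist parameter, is that it pairs each frame with the first later frame whose cloud has drifted at least min_dist away; the sketch below implements that reading with cloud centroids, though the real helper may well use sensor poses instead:

    import os.path as osp
    import torch

    def compute_spaced_time_frame_sketch(list_name_frames, path_fragment, min_dist):
        centers = [torch.load(osp.join(path_fragment, f)).pos.mean(dim=0)
                   for f in list_name_frames]
        pairs = []
        for i in range(len(centers)):
            for j in range(i + 1, len(centers)):
                if torch.norm(centers[j] - centers[i]) >= min_dist:
                    pairs.append((i, j))
                    break  # assumption: keep only the first sufficiently distant frame
        return pairs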