Пример #1
0
    def test_raises_exception_if_a_required_file_is_not_found(self):
        """find_root must raise FileNotFoundError whenever any single required file is absent."""
        sequence_id = 5
        root_path = self.temp_folder / 'root'
        sequence_dir = root_path / 'sequences' / "{0:02}".format(sequence_id)

        # Each entry pairs the path to remove with the removal function to use
        # (directories need shutil.rmtree, single files need unlink).
        removals = [
            (sequence_dir / 'image_2', shutil.rmtree),
            (sequence_dir / 'image_3', shutil.rmtree),
            (sequence_dir / 'calib.txt', lambda p: p.unlink()),
            (sequence_dir / 'times.txt', lambda p: p.unlink()),
            (root_path / 'poses' / ('{0:02}.txt'.format(sequence_id)), lambda p: p.unlink()),
        ]
        for target, remove in removals:
            # Rebuild the full structure, knock out exactly one piece, and expect failure
            self.make_required_files(root_path, sequence_id)
            remove(target)
            with self.assertRaises(FileNotFoundError):
                kitti_loader.find_root(root_path, sequence_id)

        # Clean up after ourselves
        shutil.rmtree(root_path)
Пример #2
0
    def test_searches_recursively(self):
        """find_root should locate the one nested folder holding all required files."""
        # Build a three-level tree of folders; exactly one holds the target
        # sequence's files, one holds a decoy sequence, the rest get junk files.
        sequence_num = 3
        base_root = Path(self.temp_folder)
        true_sequence = 3, 0, 2
        decoy_sequence = 2, 1, 1
        true_path = ''
        for i in range(5):
            for j in range(4):
                for k in range(3):
                    path = (base_root / "folder_{0}".format(i)
                            / "folder_{0}".format(j)
                            / "folder_{0}".format(k))
                    path.mkdir(parents=True, exist_ok=True)
                    coords = (i, j, k)
                    if coords == true_sequence:
                        true_path = path
                        self.make_required_files(true_path, sequence_num)
                    elif coords == decoy_sequence:
                        # A complete but different sequence, which must not match
                        self.make_required_files(path, 2)
                    else:
                        (path / 'decoy.txt').touch()

        # Search that structure for the one folder that has all we need
        result = kitti_loader.find_root(base_root, sequence_num)
        self.assertEqual(true_path, result)

        # Clean up after ourselves
        shutil.rmtree(base_root)
Пример #3
0
    def test_finds_root_with_required_files(self):
        """When the root folder itself contains every required file, it is returned as-is."""
        sequence_id = 7
        root_path = self.temp_folder / 'test_root'
        self.make_required_files(root_path, sequence_id)

        self.assertEqual(root_path, kitti_loader.find_root(root_path, sequence_id))

        # Clean up after ourselves
        shutil.rmtree(root_path)
Пример #4
0
 def find_roots(cls, root: typing.Union[str, bytes, PathLike, PurePath],
                sequence_ids: typing.Iterable[int] = range(11)):
     """
     Recursively search under the given root folder for importable KITTI sequence roots.

     For each requested sequence id, delegate to ``kitti_loader.find_root`` and
     record the folder it returns; ids whose required files cannot be found are
     silently skipped rather than raising.

     :param root: The directory to search under (any path-like value).
     :param sequence_ids: The sequence ids to look for. Defaults to ``range(11)``,
         the original hard-coded behavior (KITTI odometry sequences 0-10).
     :return: A dict mapping each found sequence id to its root folder path.
     """
     root = Path(root)
     actual_roots = {}
     for sequence_number in sequence_ids:
         try:
             actual_roots[sequence_number] = kitti_loader.find_root(
                 root, sequence_number)
         except FileNotFoundError:
             # This sequence is simply not present under this root; skip it
             continue
     return actual_roots
Пример #5
0
def verify_dataset(image_collection: ImageCollection, root_folder: typing.Union[str, Path],
                   sequence_number: int, repair: bool = False) -> bool:
    """
    Verify a KITTI image sequence stored in the database against the raw data on disk.

    Re-reads the sequence with pykitti and, for every image in the collection,
    compares the stored left/right pixels and metadata hashes to the files on disk.
    With ``repair=True``, fixable discrepancies (wrong image group, stale pixels or
    hashes) are written back to the database; images missing from the collection
    entirely are flagged as irreparable and require a full re-import.

    :param image_collection: The image collection object loaded from the database.
    :param root_folder: Folder to search for the raw KITTI sequence data.
    :param sequence_number: KITTI odometry sequence number; must be in [0, 11).
    :param repair: If True, write corrections back to the database where possible.
    :return: True if every check passed; False if any discrepancy was found
        (even ones that were repaired during this run).
    :raises ValueError: If the sequence number is outside the valid range.
    """
    root_folder = Path(root_folder)
    sequence_number = int(sequence_number)
    repair = bool(repair)
    if not 0 <= sequence_number < 11:
        raise ValueError("Cannot import sequence {0}, it is invalid".format(sequence_number))
    # May raise FileNotFoundError if no folder under root_folder has the required files
    root_folder = kitti_loader.find_root(root_folder, sequence_number)
    data = pykitti.odometry(root_folder, sequence="{0:02}".format(sequence_number))
    image_group = f"KITTI_{sequence_number:06}"
    valid = True        # overall result; cleared whenever a discrepancy is found
    irreparable = False  # set when images are missing outright (needs re-import)

    # Check the Image Collection
    if image_collection.image_group != image_group:
        if repair:
            image_collection.image_group = image_group
            image_collection.save()
            logging.getLogger(__name__).info(
                f"Fixed incorrect image group for {image_collection.sequence_name}")
        else:
            logging.getLogger(__name__).warning(
                f"{image_collection.sequence_name} has incorrect image group {image_collection.image_group}")
            valid = False

    # dataset.calib:      Calibration data are accessible as a named tuple
    # dataset.timestamps: Timestamps are parsed into a list of timedelta objects
    # dataset.poses:      Generator to load ground truth poses T_w_cam0
    # dataset.camN:       Generator to load individual images from camera N
    # dataset.gray:       Generator to load monochrome stereo pairs (cam0, cam1)
    # dataset.rgb:        Generator to load RGB stereo pairs (cam2, cam3)
    # dataset.velo:       Generator to load velodyne scans as [x,y,z,reflectance]
    total_invalid_images = 0
    total_fixed_images = 0
    # Open the image group writable only when repairing, so verification is read-only
    with arvet.database.image_manager.get().get_group(image_group, allow_write=repair):
        # Walk the on-disk stereo pairs in lockstep with their timestamps and poses
        for img_idx, (left_image, right_image, timestamp, pose) in enumerate(
                zip(data.cam2, data.cam3, data.timestamps, data.poses)):
            changed = False    # whether this image object needs saving back
            img_valid = True   # per-image result, independent of repair outcome
            if img_idx >= len(image_collection):
                # The collection is shorter than the raw data; cannot be patched here
                logging.getLogger(__name__).error(f"Image {img_idx} is missing from the dataset")
                irreparable = True
                valid = False
                continue

            # Hash the raw pixels so they can be compared to the stored metadata
            left_image = np.array(left_image)
            right_image = np.array(right_image)
            left_hash = bytes(xxhash.xxh64(left_image).digest())
            right_hash = bytes(xxhash.xxh64(right_image).digest())

            # Load the image object from the database
            try:
                _, image = image_collection[img_idx]
            except (KeyError, IOError, RuntimeError):
                logging.getLogger(__name__).exception(f"Error loading image object {img_idx}")
                valid = False
                total_invalid_images += 1
                continue

            # First, check the image group
            if image.image_group != image_group:
                if repair:
                    image.image_group = image_group
                    changed = True
                logging.getLogger(__name__).warning(f"Image {img_idx} has incorrect group {image.image_group}")
                valid = False

            # Load the pixels from the image; treat load failures as missing pixels
            try:
                left_actual_pixels = image.left_pixels
            except (KeyError, IOError, RuntimeError):
                left_actual_pixels = None
            try:
                right_actual_pixels = image.right_pixels
            except (KeyError, IOError, RuntimeError):
                right_actual_pixels = None

            # Compare the loaded image data to the data read from disk
            if left_actual_pixels is None or not np.array_equal(left_image, left_actual_pixels):
                if repair:
                    image.store_pixels(left_image)
                    changed = True
                else:
                    logging.getLogger(__name__).error(f"Image {img_idx}: Left pixels do not match data read from disk")
                valid = False
                img_valid = False
            if left_hash != bytes(image.metadata.img_hash):
                if repair:
                    image.metadata.img_hash = left_hash
                    changed = True
                else:
                    logging.getLogger(__name__).error(f"Image {img_idx}: Left hash does not match metadata")
                valid = False
                img_valid = False
            if right_actual_pixels is None or not np.array_equal(right_image, right_actual_pixels):
                if repair:
                    image.store_right_pixels(right_image)
                    changed = True
                else:
                    logging.getLogger(__name__).error(f"Image {img_idx}: Right pixels do not match data read from disk")
                valid = False
                img_valid = False
            if right_hash != bytes(image.right_metadata.img_hash):
                if repair:
                    image.right_metadata.img_hash = right_hash
                    changed = True
                else:
                    logging.getLogger(__name__).error(f"Image {img_idx}: Right hash does not match metadata")
                valid = False
                img_valid = False
            if changed and repair:
                # Persist all accumulated fixes for this image in one save
                logging.getLogger(__name__).warning(f"Image {img_idx}: repaired")
                image.save()
                total_fixed_images += 1
            if not img_valid:
                total_invalid_images += 1

    if irreparable:
        # Images are missing entirely, needs re-import
        logging.getLogger(__name__).error(f"Image Collection {image_collection.pk} for sequence "
                                          f"{image_collection.sequence_name} is IRREPARABLE, invalidate and re-import")
    elif repair:
        # Re-save the modified image collection
        logging.getLogger(__name__).info(f"{image_collection.sequence_name} repaired successfully "
                                         f"({total_fixed_images} image files fixed).")
    elif valid:
        logging.getLogger(__name__).info(f"Verification of {image_collection.sequence_name} successful.")
    else:
        logging.getLogger(__name__).error(
            f"Verification of {image_collection.sequence_name} ({image_collection.pk}) "
            f"FAILED, ({total_invalid_images} images failed)")
    return valid