Example 1
    def _init_files_and_annotations(self, *args, **kwargs):
        """Create files and annotations."""
        if self.subset == "train":
            subset = "train_validation"
        elif self.subset == "validation" or self.subset == "test":
            subset = "test"

        os.makedirs(lmenv.TMP_DIR, exist_ok=True)
        files_path = join(lmenv.TMP_DIR,
                          subset + "_pascal20072012_files.pickle")
        annos_path = join(lmenv.TMP_DIR,
                          subset + "_pascal20072012_annos.pickle")

        if not (isfile(files_path) and isfile(annos_path)):
            if subset == "train_validation":
                pascalvoc_2007 = Pascalvoc2007(subset=subset, *args, **kwargs)
                pascalvoc_2012 = Pascalvoc2012(subset=subset, *args, **kwargs)
                self.files = pascalvoc_2007.files + pascalvoc_2012.files
                self.annotations = pascalvoc_2007.annotations + pascalvoc_2012.annotations
            elif subset == "test":
                pascalvoc_2007 = Pascalvoc2007(subset=subset, *args, **kwargs)
                self.files = pascalvoc_2007.files
                self.annotations = pascalvoc_2007.annotations

            with open(files_path, "wb") as fp:
                pickle.dump(self.files, fp)
            with open(annos_path, "wb") as fp:
                pickle.dump(self.annotations, fp)
            print("done saved pickle")
        else:
            print("loading from pickle file: {}".format(files_path))
            with open(files_path, "rb") as fp:
                self.files = pickle.load(fp)
            with open(annos_path, "rb") as fp:
                self.annotations = pickle.load(fp)
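The method above also relies on a few module-level imports that the listing omits; a minimal sketch, with lmenv assumed to be a project-specific module that only needs to expose the TMP_DIR cache directory:

# Module-level imports assumed by the snippet above; lmenv is project-specific
# and is only expected to provide TMP_DIR for the pickle cache.
import os
import pickle
from os.path import join, isfile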
Example 2
    def _init_files_and_annotations(self, *args, **kwargs):
        """Create files and annotations."""
        if self.subset == "train":
            subset = "train_validation"
        elif self.subset == "validation" or self.subset == "test":
            subset = "test"

        if subset == "train_validation":
            pascalvoc_2007 = Pascalvoc2007(subset=subset,
                                           skip_difficult=self.skip_difficult,
                                           *args,
                                           **kwargs)
            pascalvoc_2012 = Pascalvoc2012(subset=subset,
                                           skip_difficult=self.skip_difficult,
                                           *args,
                                           **kwargs)
            self.files = pascalvoc_2007.files + pascalvoc_2012.files
            self.annotations = pascalvoc_2007.annotations + pascalvoc_2012.annotations
        elif subset == "test":
            pascalvoc_2007 = Pascalvoc2007(subset=subset,
                                           skip_difficult=self.skip_difficult,
                                           *args,
                                           **kwargs)
            self.files = pascalvoc_2007.files
            self.annotations = pascalvoc_2007.annotations
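Both variants are written as methods of a merged-dataset class that is not shown in the listing; a hypothetical usage sketch, where the class name Pascalvoc20072012 and its constructor signature are assumptions:

# Hypothetical usage: the merged class combines VOC2007 and VOC2012 for the
# training split and falls back to the VOC2007 test split otherwise.
dataset = Pascalvoc20072012(subset="train", skip_difficult=True)
print(len(dataset.files), len(dataset.annotations))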
Example 3
def test_hue():
    batch_size = 3
    image_size = [256, 512]
    dataset = Pascalvoc2007(
        batch_size=batch_size, pre_processor=ResizeWithGtBoxes(image_size),
        augmentor=Hue((-10, 10)),
    )
    dataset = DatasetIterator(dataset)

    for _ in range(5):
        images, labels = dataset.feed()
        _show_images_with_boxes(images, labels)
Example 4
def test_flip_top_bottom():
    batch_size = 3
    image_size = [256, 512]
    dataset = Pascalvoc2007(
        batch_size=batch_size, pre_processor=ResizeWithGtBoxes(image_size),
        augmentor=FlipTopBottom(),
    )
    dataset = DatasetIterator(dataset)

    for _ in range(5):
        images, labels = dataset.feed()
        _show_images_with_boxes(images, labels)
Example 5
def test_ssd_random_crop():
    batch_size = 3
    image_size = [256, 512]
    dataset = Pascalvoc2007(
        batch_size=batch_size, pre_processor=ResizeWithGtBoxes(image_size),
        augmentor=SSDRandomCrop(),
    )
    dataset = DatasetIterator(dataset)

    for _ in range(5):
        images, labels = dataset.feed()
        _show_images_with_boxes(images, labels)
        # Box values at index 2 must not exceed the image width (512);
        # values at index 3 must not exceed the image height (256).
        assert np.all(labels[:, :, 2] <= 512)
        assert np.all(labels[:, :, 3] <= 256)
Example 6
def test_sequence():
    batch_size = 3
    image_size = [256, 512]
    augmentor = Sequence([
        FlipLeftRight(),
        FlipTopBottom(),
        SSDRandomCrop(),
    ])

    dataset = Pascalvoc2007(
        batch_size=batch_size, pre_processor=ResizeWithGtBoxes(image_size),
        augmentor=augmentor,
    )
    dataset = DatasetIterator(dataset)

    for _ in range(5):
        images, labels = dataset.feed()
        _show_images_with_boxes(images, labels)
Example 7
def test_pascalvoc_2007_not_skip_difficult():
    batch_size = 3
    image_size = [256, 512]

    num_max_boxes = 42
    num_train = 2501
    num_validation = 2510
    num_test = 4952

    assert Pascalvoc2007.count_max_boxes(skip_difficult=False) == num_max_boxes

    dataset = Pascalvoc2007(batch_size=batch_size,
                            pre_processor=ResizeWithGtBoxes(image_size),
                            skip_difficult=False)
    dataset = DatasetIterator(dataset)
    assert dataset.num_per_epoch == num_train

    val_dataset = Pascalvoc2007(subset="validation",
                                batch_size=batch_size,
                                pre_processor=ResizeWithGtBoxes(image_size),
                                skip_difficult=False)
    val_dataset = DatasetIterator(val_dataset)
    assert val_dataset.num_per_epoch == num_validation

    test_dataset = Pascalvoc2007(subset="test",
                                 batch_size=batch_size,
                                 pre_processor=ResizeWithGtBoxes(image_size),
                                 skip_difficult=False)
    test_dataset = DatasetIterator(test_dataset)
    assert test_dataset.num_per_epoch == num_test

    for _ in range(STEP_SIZE):
        images, labels = dataset.feed()

        assert isinstance(images, np.ndarray)
        assert images.shape[0] == batch_size
        assert images.shape[1] == image_size[0]
        assert images.shape[2] == image_size[1]
        assert images.shape[3] == 3

        assert isinstance(labels, np.ndarray)
        assert labels.shape[0] == batch_size
        assert labels.shape[1] == num_max_boxes
        assert labels.shape[2] == 5

    for _ in range(STEP_SIZE):
        images, labels = val_dataset.feed()

        assert isinstance(images, np.ndarray)
        assert images.shape[0] == batch_size
        assert images.shape[1] == image_size[0]
        assert images.shape[2] == image_size[1]
        assert images.shape[3] == 3

        assert isinstance(labels, np.ndarray)
        assert labels.shape[0] == batch_size
        assert labels.shape[1] == num_max_boxes
        assert labels.shape[2] == 5

    for _ in range(STEP_SIZE):
        images, labels = test_dataset.feed()

        assert isinstance(images, np.ndarray)
        assert images.shape[0] == batch_size
        assert images.shape[1] == image_size[0]
        assert images.shape[2] == image_size[1]
        assert images.shape[3] == 3

        assert isinstance(labels, np.ndarray)
        assert labels.shape[0] == batch_size
        assert labels.shape[1] == num_max_boxes
        assert labels.shape[2] == 5
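The test functions above use STEP_SIZE and _show_images_with_boxes without defining them; a minimal stand-in sketch, assuming STEP_SIZE only controls how many batches each loop pulls and that the real helper renders the images with their boxes for visual inspection:

import numpy as np

STEP_SIZE = 5  # assumed batch-loop length; the real value lives in the test module


def _show_images_with_boxes(images, labels):
    # Stand-in for the real visualisation helper: just sanity-check the shapes.
    assert isinstance(images, np.ndarray)
    assert isinstance(labels, np.ndarray)
    assert images.shape[0] == labels.shape[0]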