Example 1
    def test_get_column_names__all_columns(self):
        batch = ia.Batch(
            images=np.zeros((1, 2, 2, 3), dtype=np.uint8),
            heatmaps=[np.zeros((2, 2, 1), dtype=np.float32)],
            segmentation_maps=[np.zeros((2, 2, 1), dtype=np.int32)],
            keypoints=[
                ia.KeypointsOnImage([ia.Keypoint(x=0, y=0)], shape=(2, 2, 3))
            ],
            bounding_boxes=[
                ia.BoundingBoxesOnImage([ia.BoundingBox(0, 0, 1, 1)],
                                        shape=(2, 2, 3))
            ],
            polygons=[
                ia.PolygonsOnImage([ia.Polygon([(0, 0), (1, 0), (1, 1)])],
                                   shape=(2, 2, 3))
            ],
            line_strings=[
                ia.LineStringsOnImage([ia.LineString([(0, 0), (1, 0)])],
                                      shape=(2, 2, 3))
            ])

        names = batch.get_column_names()

        assert names == [
            "images", "heatmaps", "segmentation_maps", "keypoints",
            "bounding_boxes", "polygons", "line_strings"
        ]
Example 2
 def load_batches():
     for i in range(int(Xtrain.shape[0] / batch_size)):
         st = i * batch_size
         ed = (i + 1) * batch_size
         batch = ia.Batch(images=Xtrain[st:ed, :, :, :],
                          data=Ytrain[st:ed, :])
         yield batch
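A generator like this is usually handed to an augmenter's augment_batches(), which yields the batches back with images_aug filled in and the data field passed through untouched. The following sketch is a hypothetical consumer (the iaa.Fliplr(0.5) pipeline and the training step are placeholders, not part of the example above):

import imgaug.augmenters as iaa

def train_on_augmented_batches(load_batches, augseq=iaa.Fliplr(0.5)):
    # augment_batches() accepts any iterable of ia.Batch objects;
    # background=True offloads the augmentation to a separate process.
    for batch_aug in augseq.augment_batches(load_batches(), background=True):
        images = batch_aug.images_aug  # augmented images of this batch
        labels = batch_aug.data        # the Ytrain slice, passed through unchanged
        # ... feed (images, labels) to the training step here ...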
Example 3
 def test_deepcopy_no_arguments(self):
     batch = ia.Batch()
     observed = batch.deepcopy()
     keys = list(observed.__dict__.keys())
     assert len(keys) >= 14
     for attr_name in keys:
         assert getattr(observed, attr_name) is None
Example 4
    def evaluateAll(self, ds, fold: int, stage=-1, negatives="real"):
        folds = self.kfold(ds, range(0, len(ds)))
        vl, vg, test_g = folds.generator(fold, False, negatives=negatives,
                                         returnBatch=True)
        indexes = folds.sampledIndexes(fold, False, negatives)
        m = self.load_model(fold, stage)
        num = 0
        with tqdm.tqdm(total=len(indexes), unit="files",
                       desc="segmentation of validation set from " + str(fold)) as pbar:
            try:
                for f in test_g():
                    if num >= len(indexes):
                        break
                    x, y, b = f
                    z = m.predict(x)
                    ids = []
                    augs = []
                    for i in range(len(z)):
                        if num >= len(indexes):
                            break
                        orig = b.images[i]
                        num = num + 1
                        ma = z[i]
                        id = b.data[i]
                        segmentation_maps_aug = [imgaug.SegmentationMapOnImage(ma, ma.shape)]
                        augmented = imgaug.augmenters.Scale(
                            {"height": orig.shape[0], "width": orig.shape[1]}
                        ).augment_segmentation_maps(segmentation_maps_aug)
                        ids.append(id)
                        augs = augs + augmented

                    res = imgaug.Batch(images=b.images, data=ids,
                                       segmentation_maps=b.segmentation_maps)
                    res.predicted_maps_aug = augs
                    yield res
                    pbar.update(len(ids))
            finally:
                vl.terminate()
                vg.terminate()
Example 5
    def test_to_batch(self):
        batch_before_aug = ia.Batch()
        batch_before_aug.images_unaug = 0
        batch_before_aug.heatmaps_unaug = 1
        batch_before_aug.segmentation_maps_unaug = 2
        batch_before_aug.keypoints_unaug = 3
        batch_before_aug.bounding_boxes_unaug = 4
        batch_before_aug.polygons_unaug = 5
        batch_before_aug.line_strings_unaug = 6

        batch_inaug = _BatchInAugmentation(images=10,
                                           heatmaps=20,
                                           segmentation_maps=30,
                                           keypoints=40,
                                           bounding_boxes=50,
                                           polygons=60,
                                           line_strings=70)

        batch = batch_inaug.to_batch(batch_before_aug)

        assert batch.images_unaug == 0
        assert batch.heatmaps_unaug == 1
        assert batch.segmentation_maps_unaug == 2
        assert batch.keypoints_unaug == 3
        assert batch.bounding_boxes_unaug == 4
        assert batch.polygons_unaug == 5
        assert batch.line_strings_unaug == 6

        assert batch.images_aug == 10
        assert batch.heatmaps_aug == 20
        assert batch.segmentation_maps_aug == 30
        assert batch.keypoints_aug == 40
        assert batch.bounding_boxes_aug == 50
        assert batch.polygons_aug == 60
        assert batch.line_strings_aug == 70
Example 6
    def load_batches():
        # Here, load 10 batches of size 4 each.
        # You can also load an infinite amount of batches, if you don't train
        # in epochs.
        batch_size = 4
        nb_batches = 10

        # Here, for simplicity we just always use the same image.
        astronaut = data.astronaut()
        astronaut = ia.imresize_single_image(astronaut, (64, 64))

        for i in range(nb_batches):
            # A list containing all images of the batch.
            batch_images = []
            # A list containing IDs per image. This is not necessary for the
            # background augmentation and here just used to showcase that you
            # can transfer additional information.
            batch_data = []

            # Add some images to the batch.
            for b in range(batch_size):
                batch_images.append(astronaut)
                batch_data.append((i, b))

            # Create the batch object to send to the background processes.
            batch = ia.Batch(
                images=np.array(batch_images, dtype=np.uint8),
                data=batch_data
            )

            yield batch
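The comments above mention sending batches to background processes. One possible consumer is imgaug's BatchLoader/BackgroundAugmenter pair (deprecated in newer imgaug releases in favour of multicore.Pool, as the warnings tested in later examples show); the sketch below assumes the load_batches() generator from above and uses a placeholder augmentation pipeline:

import imgaug.augmenters as iaa
from imgaug import multicore

augseq = iaa.GaussianBlur(sigma=(0.0, 1.0))  # placeholder pipeline

batch_loader = multicore.BatchLoader(load_batches())
bg_augmenter = multicore.BackgroundAugmenter(batch_loader, augseq)

while True:
    batch = bg_augmenter.get_batch()  # returns None once all batches are processed
    if batch is None:
        break
    images_aug = batch.images_aug     # augmented images
    ids = batch.data                  # the (i, b) tuples attached in load_batches()

batch_loader.terminate()
bg_augmenter.terminate()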
Example 7
def load_images(n_batches=10, sleep=0.0):
    batch_size = 4
    astronaut = data.astronaut()
    astronaut = ia.imresize_single_image(astronaut, (64, 64))
    kps = ia.KeypointsOnImage([ia.Keypoint(x=15, y=25, vis=None, label=None)],
                              shape=astronaut.shape)
    counter = 0
    for i in range(n_batches):
        batch_images = []
        batch_kps = []
        for b in range(batch_size):
            astronaut_text = ia.draw_text(astronaut,
                                          x=0,
                                          y=0,
                                          text="%d" % (counter, ),
                                          color=[0, 255, 0],
                                          size=16)
            batch_images.append(astronaut_text)
            batch_kps.append(kps)
            counter += 1
        batch = ia.Batch(images=np.array(batch_images, dtype=np.uint8),
                         keypoints=batch_kps)
        yield batch
        if sleep > 0:
            time.sleep(sleep)
Example 8
    def test_property_warnings(self):
        batch = ia.Batch()
        # self.assertWarns does not exist in py2.7
        with warnings.catch_warnings(record=True) as caught_warnings:
            warnings.simplefilter("always")

            _ = batch.images
            assert len(caught_warnings) == 1
            assert "is deprecated" in str(caught_warnings[-1].message)

            _ = batch.heatmaps
            assert len(caught_warnings) == 2
            assert "is deprecated" in str(caught_warnings[-1].message)

            _ = batch.segmentation_maps
            assert len(caught_warnings) == 3
            assert "is deprecated" in str(caught_warnings[-1].message)

            _ = batch.keypoints
            assert len(caught_warnings) == 4
            assert "is deprecated" in str(caught_warnings[-1].message)

            _ = batch.bounding_boxes
            assert len(caught_warnings) == 5
            assert "is deprecated" in str(caught_warnings[-1].message)
Example 9
	def createBatchGeneratorInParallel(inputs, outputs, batch_size, shuffle):
		num_examples = len(inputs)
		if batch_size is None:
			batch_size = num_examples
		if batch_size <= 0:
			raise ValueError('Invalid batch size: {}'.format(batch_size))

		indices = np.arange(num_examples)
		if shuffle:
			np.random.shuffle(indices)

		start_idx = 0
		while True:
			end_idx = start_idx + batch_size
			batch_indices = indices[start_idx:end_idx]
			if batch_indices.size > 0:  # If batch_indices is non-empty.
				# FIXME [fix] >> Does not work correctly in time-major data.
				batch_inputs, batch_outputs = inputs[batch_indices], outputs[batch_indices]
				if batch_inputs.size > 0 and batch_outputs.size > 0:  # If batch_inputs and batch_outputs are non-empty.
					# Add e.g. keypoints=... or bounding_boxes=... here to also augment keypoints / bounding boxes on these images.
					yield ia.Batch(images=batch_inputs, segmentation_maps=batch_outputs)

			if end_idx >= num_examples:
				break
			start_idx = end_idx
Example 10
def _lane_argue(*, image, lane_src):
    lines_tuple = [[(float(pt['x']), float(pt['y'])) for pt in line_spec] for line_spec in lane_src['Lines']]
    lss = [ia_LineString(line_tuple_spec) for line_tuple_spec in lines_tuple]

    lsoi = LineStringsOnImage(lss, shape=image.shape)
    color_shift = iaa.OneOf([
        iaa.GaussianBlur(sigma=(0.5, 1.5)),
        iaa.LinearContrast((1.5, 1.5), per_channel=False),
        iaa.Multiply((0.8, 1.2), per_channel=0.2),
        iaa.AdditiveGaussianNoise(loc=0, scale=(0.0, 0.1 * 255), per_channel=0.5),
        iaa.WithColorspace(to_colorspace=iaa.CSPACE_HSV, from_colorspace=iaa.CSPACE_RGB,
                           children=iaa.WithChannels(0, iaa.Multiply((0.7, 1.3)))),
        iaa.WithColorspace(to_colorspace=iaa.CSPACE_HSV, from_colorspace=iaa.CSPACE_RGB,
                           children=iaa.WithChannels(1, iaa.Multiply((0.1, 2)))),
        iaa.WithColorspace(to_colorspace=iaa.CSPACE_HSV, from_colorspace=iaa.CSPACE_RGB,
                           children=iaa.WithChannels(2, iaa.Multiply((0.5, 1.5)))),
    ])
    position_shift = iaa.SomeOf(4, [
        iaa.Fliplr(),
        iaa.Crop(percent=([0, 0.2], [0, 0.15], [0, 0], [0, 0.15]), keep_size=True),
        iaa.TranslateX(px=(-16, 16)),
        iaa.ShearX(shear=(-15, 15)),
        iaa.Rotate(rotate=(-15, 15))
    ])
    aug = iaa.Sequential([
        iaa.Sometimes(p=0.6, then_list=color_shift),
        iaa.Sometimes(p=0.6, then_list=position_shift)
    ], random_order=True)
    batch = ia.Batch(images=[image], line_strings=[lsoi])
    batch_aug = list(aug.augment_batches([batch]))[0]  # augment_batches returns a generator
    image_aug = batch_aug.images_aug[0]
    lsoi_aug = batch_aug.line_strings_aug[0]
    lane_aug = [[dict(x=kpt.x, y=kpt.y) for kpt in shapely_line.to_keypoints()] for shapely_line in lsoi_aug]
    return image_aug, dict(Lines=lane_aug)
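As the first list comprehension shows, lane_src is expected to hold its polylines under a "Lines" key, each line being a sequence of {'x': ..., 'y': ...} points, and the function returns the augmented image together with a dict of the same shape. A hypothetical call with dummy data might look like this:

import numpy as np

image = np.zeros((256, 512, 3), dtype=np.uint8)  # placeholder RGB image
lane_src = {"Lines": [[{"x": 10, "y": 200}, {"x": 300, "y": 50}]]}

image_aug, lanes_aug = _lane_argue(image=image, lane_src=lane_src)
print(image_aug.shape, lanes_aug["Lines"][0])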
Example 11
 def test_deepcopy_only_images_provided(self):
     images = np.zeros((1, 1, 3), dtype=np.uint8)
     batch = ia.Batch(images=images)
     observed = batch.deepcopy()
     for attr_name in observed.__dict__.keys():
         if attr_name != "images_unaug":
             assert getattr(observed, attr_name) is None
     assert ia.is_np_array(observed.images_unaug)
Example 12
def test_BackgroundAugmenter__augment_images_worker():
    reseed()

    warnings.simplefilter("always")
    with warnings.catch_warnings(record=True) as caught_warnings:

        def gen():
            yield ia.Batch(images=np.zeros((1, 4, 4, 3), dtype=np.uint8))

        bl = multicore.BatchLoader(gen(), queue_size=2)
        bgaug = multicore.BackgroundAugmenter(bl,
                                              iaa.Noop(),
                                              queue_size=1,
                                              nb_workers=1)

        queue_source = multiprocessing.Queue(2)
        queue_target = multiprocessing.Queue(2)
        queue_source.put(
            pickle.dumps(
                ia.Batch(images=np.zeros((1, 4, 8, 3), dtype=np.uint8)),
                protocol=-1))
        queue_source.put(pickle.dumps(None, protocol=-1))
        bgaug._augment_images_worker(iaa.Add(1), queue_source, queue_target, 1)

        batch_aug = pickle.loads(queue_target.get())
        assert isinstance(batch_aug, ia.Batch)
        assert batch_aug.images_unaug is not None
        assert batch_aug.images_unaug.dtype == np.uint8
        assert batch_aug.images_unaug.shape == (1, 4, 8, 3)
        assert np.array_equal(batch_aug.images_unaug,
                              np.zeros((1, 4, 8, 3), dtype=np.uint8))
        assert batch_aug.images_aug is not None
        assert batch_aug.images_aug.dtype == np.uint8
        assert batch_aug.images_aug.shape == (1, 4, 8, 3)
        assert np.array_equal(batch_aug.images_aug,
                              np.zeros((1, 4, 8, 3), dtype=np.uint8) + 1)

        finished_signal = pickle.loads(queue_target.get())
        assert finished_signal is None

        source_finished_signal = pickle.loads(queue_source.get())
        assert source_finished_signal is None

        assert queue_source.empty()
        assert queue_target.empty()

        queue_source.close()
        queue_target.close()
        queue_source.join_thread()
        queue_target.join_thread()
        bl.terminate()
        bgaug.terminate()

    assert len(caught_warnings) > 0
    for warning in caught_warnings:
        assert ("BatchLoader is deprecated" in str(warning.message)
                or "BackgroundAugmenter is deprecated" in str(warning.message))
Example 13
 def load_batches():
     for i in range(int(Xtrain.shape[0] / batch_size)):
         print("Training: {0}/{1}".format(i, ptrain_max),
               end='\r')
         st = i * batch_size
         ed = (i + 1) * batch_size
         batch = ia.Batch(images=Xtrain[st:ed, :, :, :],
                          data=Ytrain[st:ed, :])
         yield batch
Example 14
    def test_to_batch_in_augmentation__only_images(self):
        batch = ia.Batch(images=np.zeros((1, 2, 2, 3), dtype=np.uint8))

        batch_inaug = batch.to_batch_in_augmentation()

        assert isinstance(batch_inaug, ia.BatchInAugmentation)
        assert ia.is_np_array(batch_inaug.images)
        assert batch_inaug.images.shape == (1, 2, 2, 3)
        assert batch_inaug.get_column_names() == ["images"]
Example 15
    def _test_imap_batches_both(self, call_unordered):
        batches = [
            ia.Batch(images=[ia.quokka()]),
            ia.Batch(images=[ia.quokka() + 1])
        ]

        def _generate_batches():
            for batch in batches:
                yield batch

        augseq = iaa.Noop()
        mock_Pool = mock.MagicMock()
        mock_Pool.return_value = mock_Pool
        mock_Pool.imap.return_value = batches
        mock_Pool.imap_unordered.return_value = batches
        with mock.patch("multiprocessing.Pool", mock_Pool):
            with multicore.Pool(augseq, processes=1) as pool:
                gen = _generate_batches()
                if call_unordered:
                    _ = list(pool.imap_batches_unordered(gen))
                else:
                    _ = list(pool.imap_batches(gen))

            if call_unordered:
                to_check = mock_Pool.imap_unordered
            else:
                to_check = mock_Pool.imap

            assert to_check.call_count == 1
            assert to_check.call_args[0][0] == multicore._Pool_starworker
            arg_batches = list(
                to_check.call_args[0]
                [1])  # convert generator to list, make it subscriptable
            # args, arg 1 (batches with ids), tuple 0, entry 0 in tuple (=> batch id)
            assert arg_batches[0][0] == 0
            # tuple 0, entry 1 in tuple (=> batch)
            assert np.array_equal(arg_batches[0][1].images_unaug,
                                  batches[0].images_unaug)
            # tuple 1, entry 0 in tuple (=> batch id)
            assert arg_batches[1][0] == 1
            # tuple 1, entry 1 in tuple (=> batch)
            assert np.array_equal(arg_batches[1][1].images_unaug,
                                  batches[1].images_unaug)
Example 16
def chapter_augmenters_blendalphaboundingboxes():
    fn_start = "blend/blendalphaboundingboxes"

    aug = iaa.BlendAlphaBoundingBoxes(None, background=iaa.Multiply(0.0))
    batch = ia.Batch(
        images=[ia.quokka(size=(128, 128))] * (4 * 1),
        bounding_boxes=[ia.quokka_bounding_boxes(size=(128, 128))] * (4 * 1))
    run_and_save_augseq_batch(fn_start + "_multiply_background.jpg",
                              aug,
                              batch,
                              cols=4,
                              rows=1)
Example 17
    def test_augmentations_with_seed_match_for_images_and_keypoints(self):
        augseq = iaa.AddElementwise((0, 255))
        image = np.zeros((10, 10, 1), dtype=np.uint8)
        # keypoints here will not be changed by augseq, but they will induce
        # deterministic mode to start in augment_batches() as each batch
        # contains images AND keypoints
        kps = ia.KeypointsOnImage([ia.Keypoint(x=2, y=0)], shape=(10, 10, 1))
        batch = ia.Batch(images=np.uint8([image, image]), keypoints=[kps, kps])
        batches = [batch.deepcopy() for _ in sm.xrange(60)]

        # seed=1
        with multicore.Pool(augseq, processes=2, maxtasksperchild=30,
                            seed=1) as pool:
            batches_aug1 = pool.map_batches(batches, chunksize=2)
        # seed=1
        with multicore.Pool(augseq, processes=2, seed=1) as pool:
            batches_aug2 = pool.map_batches(batches, chunksize=1)
        # seed=2
        with multicore.Pool(augseq, processes=2, seed=2) as pool:
            batches_aug3 = pool.map_batches(batches, chunksize=1)

        assert len(batches_aug1) == 60
        assert len(batches_aug2) == 60
        assert len(batches_aug3) == 60

        for batches_aug in [batches_aug1, batches_aug2, batches_aug3]:
            for batch in batches_aug:
                for keypoints_aug in batch.keypoints_aug:
                    assert keypoints_aug.keypoints[0].x == 2
                    assert keypoints_aug.keypoints[0].y == 0

        for b1, b2, b3 in zip(batches_aug1, batches_aug2, batches_aug3):
            # images were augmented
            assert not np.array_equal(b1.images_unaug, b1.images_aug)
            assert not np.array_equal(b2.images_unaug, b2.images_aug)
            assert not np.array_equal(b3.images_unaug, b3.images_aug)

            # original images still the same
            assert np.array_equal(b1.images_unaug, batch.images_unaug)
            assert np.array_equal(b2.images_unaug, batch.images_unaug)
            assert np.array_equal(b3.images_unaug, batch.images_unaug)

            # augmentations for same seed are the same
            assert np.array_equal(b1.images_aug, b2.images_aug)

            # augmentations for different seeds are different
            assert not np.array_equal(b1.images_aug, b3.images_aug)

        # make sure that batches for the two pools with same seed did not
        # repeat within results (only between the results of the two pools)
        for batches_aug in [batches_aug1, batches_aug2, batches_aug3]:
            self._assert_each_augmentation_not_more_than_once(batches_aug)
Example 18
 def augmented_image_visualizer(predictionItem: PredictionItem):
     cache_path = visualization_ctx().path
     path = cache_path + str(predictionItem.id) + ".png"
     if os.path.exists(path):
         return path
     if len(predictionItem.y.shape) > 1:  # Should be a segmentation mask in this case; better to refactor this later.
         batch = imgaug.Batch(
             images=[predictionItem.x],
             segmentation_maps=[imgaug.SegmentationMapsOnImage(predictionItem.y, shape=predictionItem.y.shape)])
         aug_batch = augmenter.augment_batch(batch)
         img = aug_batch.segmentation_maps_aug[0].draw_on_image(aug_batch.images_aug[0], alpha=0.5)[0]
     else:
         img = augmenter.augment_image(predictionItem.x)  # Augment segmentation map also
     imageio.imwrite(path, img)
     return path
Example 19
    def test_warnings_for_deprecated_properties(self):
        batch = ia.Batch()
        # self.assertWarns does not exist in py2.7
        deprecated_attr_names = ["images", "heatmaps", "segmentation_maps",
                                 "keypoints", "bounding_boxes"]
        for attr_name in deprecated_attr_names:
            with self.subTest(attr_name=attr_name),\
                    warnings.catch_warnings(record=True) as caught_warnings:
                warnings.simplefilter("always")

                _ = getattr(batch, attr_name)
                assert len(caught_warnings) == 1
                assert "is deprecated" in str(caught_warnings[-1].message)
Example 20
def generic_batch_generator(ds, batchSize, maxItems=-1):
    indexes = None
    if maxItems != -1:
        indexes = list(range(min(maxItems, len(ds))))
    dg = GenericDataSetSequence(ds, batchSize, indexes, False)
    for i in range(len(dg)):
        zz = dg[i]
        if len(zz) == 3:
            X, y, s = zz
        else:
            X, y = zz
        yield imgaug.Batch(images=X, data=y)
    return
Example 21
    def test_augmentations_without_seed_differ(self):
        augseq = iaa.AddElementwise((0, 255))
        image = np.zeros((10, 10, 1), dtype=np.uint8)
        batch = ia.Batch(images=np.uint8([image, image]))
        batches = [batch.deepcopy() for _ in sm.xrange(20)]
        with multicore.Pool(augseq, processes=2, maxtasksperchild=5) as pool:
            batches_aug = pool.map_batches(batches, chunksize=2)
        with multicore.Pool(augseq, processes=2) as pool:
            batches_aug.extend(pool.map_batches(batches, chunksize=1))

        assert len(batches_aug) == 2 * 20

        self._assert_each_augmentation_not_more_than_once(batches_aug)
Example 22
    def test_fill_from_augmented_normalized_batch(self):
        batch = ia.UnnormalizedBatch(
            images=np.zeros((1, 2, 2, 3), dtype=np.uint8),
            heatmaps=[np.zeros((2, 2, 1), dtype=np.float32)],
            segmentation_maps=[np.zeros((2, 2, 1), dtype=np.int32)],
            keypoints=[[(0, 0)]],
            bounding_boxes=[[ia.BoundingBox(0, 0, 1, 1)]],
            polygons=[[ia.Polygon([(0, 0), (1, 0), (1, 1)])]],
            line_strings=[[ia.LineString([(0, 0), (1, 0)])]])
        batch_norm = ia.Batch(
            images=np.zeros((1, 2, 2, 3), dtype=np.uint8),
            heatmaps=[
                ia.HeatmapsOnImage(np.zeros((2, 2, 1), dtype=np.float32),
                                   shape=(2, 2, 3))
            ],
            segmentation_maps=[
                ia.SegmentationMapsOnImage(np.zeros((2, 2, 1), dtype=np.int32),
                                           shape=(2, 2, 3))
            ],
            keypoints=[
                ia.KeypointsOnImage([ia.Keypoint(0, 0)], shape=(2, 2, 3))
            ],
            bounding_boxes=[
                ia.BoundingBoxesOnImage([ia.BoundingBox(0, 0, 1, 1)],
                                        shape=(2, 2, 3))
            ],
            polygons=[
                ia.PolygonsOnImage([ia.Polygon([(0, 0), (1, 0), (1, 1)])],
                                   shape=(2, 2, 3))
            ],
            line_strings=[
                ia.LineStringsOnImage([ia.LineString([(0, 0), (1, 0)])],
                                      shape=(2, 2, 3))
            ])
        batch_norm.images_aug = batch_norm.images_unaug
        batch_norm.heatmaps_aug = batch_norm.heatmaps_unaug
        batch_norm.segmentation_maps_aug = batch_norm.segmentation_maps_unaug
        batch_norm.keypoints_aug = batch_norm.keypoints_unaug
        batch_norm.bounding_boxes_aug = batch_norm.bounding_boxes_unaug
        batch_norm.polygons_aug = batch_norm.polygons_unaug
        batch_norm.line_strings_aug = batch_norm.line_strings_unaug

        batch = batch.fill_from_augmented_normalized_batch(batch_norm)

        assert batch.images_aug.shape == (1, 2, 2, 3)
        assert ia.is_np_array(batch.heatmaps_aug[0])
        assert ia.is_np_array(batch.segmentation_maps_aug[0])
        assert batch.keypoints_aug[0][0] == (0, 0)
        assert batch.bounding_boxes_aug[0][0].x1 == 0
        assert batch.polygons_aug[0][0].exterior[0][0] == 0
        assert batch.line_strings_aug[0][0].coords[0][0] == 0
Example 23
    def test_inputs_not_lost(self):
        """Test to make sure that inputs (e.g. images) are never lost."""
        def _assert_contains_all_ids(batches_aug):
            # batch.images_unaug
            ids = set()
            for batch_aug in batches_aug:
                ids.add(int(batch_aug.images_unaug.flat[0]))
                ids.add(int(batch_aug.images_unaug.flat[1]))
            for idx in sm.xrange(2 * 100):
                assert idx in ids
            assert len(ids) == 200

            # batch.images_aug
            ids = set()
            for batch_aug in batches_aug:
                ids.add(int(batch_aug.images_aug.flat[0]))
                ids.add(int(batch_aug.images_aug.flat[1]))
            for idx in sm.xrange(2 * 100):
                assert idx in ids
            assert len(ids) == 200

        augseq = iaa.Identity()
        image = np.zeros((1, 1, 1), dtype=np.uint8)
        # creates batches containing images with ids from 0 to 199 (one pair
        # of consecutive ids per batch)
        batches = [
            ia.Batch(
                images=np.uint8([image + b_idx * 2, image + b_idx * 2 + 1]))
            for b_idx in sm.xrange(100)
        ]

        with multicore.Pool(augseq, processes=2, maxtasksperchild=25) as pool:
            batches_aug = pool.map_batches(batches)
            _assert_contains_all_ids(batches_aug)

        with multicore.Pool(augseq, processes=2, maxtasksperchild=25,
                            seed=1) as pool:
            batches_aug = pool.map_batches(batches)
            _assert_contains_all_ids(batches_aug)

        with multicore.Pool(augseq, processes=3, seed=2) as pool:
            batches_aug = pool.map_batches(batches)
            _assert_contains_all_ids(batches_aug)

        with multicore.Pool(augseq, processes=2, seed=None) as pool:
            batches_aug = pool.map_batches(batches)
            _assert_contains_all_ids(batches_aug)

            batches_aug = pool.map_batches(batches)
            _assert_contains_all_ids(batches_aug)
Example 24
    def _loadBatches(self):
        indices = np.arange(self._num_examples)
        if self._shuffle:
            np.random.shuffle(indices)

        for step in range(self._num_steps):
            start = step * self._batch_size
            end = start + self._batch_size
            batch_indices = indices[start:end]
            if batch_indices.size > 0:  # If batch_indices is non-empty.
                batch_images = self._images[batch_indices]
                if batch_images.size > 0:  # If batch_images is non-empty.
                    # Create the batch object to send to the background processes.
                    yield ia.Batch(images=batch_images, data=batch_indices)
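Since _loadBatches() yields ia.Batch objects, it can be streamed through multicore.Pool, whose imap_batches() lazily yields the augmented batches. A minimal sketch, assuming it lives in another method of the same class (the _augmentAll name and the iaa.Affine pipeline are placeholders, not part of the example above):

import imgaug.augmenters as iaa
from imgaug import multicore

def _augmentAll(self):
    augseq = iaa.Affine(rotate=(-10, 10))  # placeholder pipeline
    with multicore.Pool(augseq, processes=2) as pool:
        for batch_aug in pool.imap_batches(self._loadBatches()):
            images_aug = batch_aug.images_aug
            batch_indices = batch_aug.data  # indices attached in _loadBatches()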
Example 25
def chapter_augmenters_blendalphasegmapclassids():
    fn_start = "blend/blendalphasegmapclassids"

    aug = iaa.BlendAlphaSegMapClassIds([1, 3],
                                       foreground=iaa.AddToHue((-100, 100)))
    batch = ia.Batch(
        images=[ia.quokka(size=(128, 128))] * (4 * 2),
        segmentation_maps=[ia.quokka_segmentation_map(size=(128, 128))] *
        (4 * 2))
    run_and_save_augseq_batch(fn_start + "_hue.jpg",
                              aug,
                              batch,
                              cols=4,
                              rows=2)
Example 26
    def test_init(self):
        attr_names = [
            "images", "heatmaps", "segmentation_maps", "keypoints",
            "bounding_boxes", "polygons"
        ]
        batch = ia.Batch()
        for attr_name in attr_names:
            assert getattr(batch, "%s_unaug" % (attr_name, )) is None
            assert getattr(batch, "%s_aug" % (attr_name, )) is None
        assert batch.data is None

        # we exploit here that Batch() init does not verify its inputs
        batch = ia.Batch(images=0,
                         heatmaps=1,
                         segmentation_maps=2,
                         keypoints=3,
                         bounding_boxes=4,
                         polygons=5,
                         data=6)
        for i, attr_name in enumerate(attr_names):
            assert getattr(batch, "%s_unaug" % (attr_name, )) == i
            assert getattr(batch, "%s_aug" % (attr_name, )) is None
        assert batch.data == 6
Example 27
 def test___init___all_arguments_provided(self):
     # we exploit here that Batch() init does not verify its inputs
     batch = ia.Batch(images=0,
                      heatmaps=1,
                      segmentation_maps=2,
                      keypoints=3,
                      bounding_boxes=4,
                      polygons=5,
                      line_strings=6,
                      data=7)
     for i, attr_name in enumerate(ATTR_NAMES):
         assert getattr(batch, "%s_unaug" % (attr_name, )) == i
         assert getattr(batch, "%s_aug" % (attr_name, )) is None
     assert batch.data == 7
Example 28
def load_batches_from_numpy():
    batch_size = 10
    shuffle = True
    is_time_major = False

    astronaut = data.astronaut()
    astronaut = ia.imresize_single_image(astronaut, (64, 64))
    images = list()
    for _ in range(100):
        images.append(astronaut)
    images = np.array(images)

    #--------------------
    batch_dim = 1 if is_time_major else 0

    num_examples, steps_per_epoch = 0, 0
    if images is not None:
        num_examples = images.shape[batch_dim]
        steps_per_epoch = ((num_examples - 1) // batch_size +
                           1) if num_examples > 0 else 0
    #if images is None:
    if num_examples <= 0:
        return None

    indices = np.arange(num_examples)
    if shuffle:
        np.random.shuffle(indices)

    for step in range(steps_per_epoch):
        start = step * batch_size
        end = start + batch_size
        batch_indices = indices[start:end]
        if batch_indices.size > 0:  # If batch_indices is non-empty.
            batch_images = images[batch_indices]
            if batch_images.size > 0:  # If batch_images is non-empty.
                #batch_data = []
                #for idx in batch_indices:
                #	batch_data.append((step, idx))

                # Create the batch object to send to the background processes.
                batch = ia.Batch(
                    images=batch_images,
                    #data=batch_data
                    data=batch_indices)

                yield batch
Example 29
def load_batches(data, batch_size, n_batches):
    for iBatch in range(n_batches):
        dataset_choice = np.random.choice(len(data), batch_size, replace=True)
        images, phase_annotations = [], []
        for idx_dataset in dataset_choice:
            # print("\n\n\n{0}\n\n\n".format(len(self._data[idx_dataset])))
            frame_choice = np.random.choice(len(data[idx_dataset]),
                                            replace=True)
            # print("\n\n\n{0}\n\n\n".format(frame_choice))
            image, phase = data[idx_dataset][frame_choice]
            images.append(image)
            phase_annotations.append(phase)

        images = np.stack(images, axis=0)
        # np.array(images, dtype=np.uint8)
        batch = ia.Batch(images=images, data=np.array(phase_annotations))

        yield batch
Example 30
    def test_augmentations_with_seed_match(self):
        nb_batches = 60
        augseq = iaa.AddElementwise((0, 255))
        image = np.zeros((10, 10, 1), dtype=np.uint8)
        batch = ia.Batch(images=np.uint8([image, image]))
        batches = [batch.deepcopy() for _ in sm.xrange(nb_batches)]

        # seed=1
        with multicore.Pool(augseq, processes=2, maxtasksperchild=30,
                            seed=1) as pool:
            batches_aug1 = pool.map_batches(batches, chunksize=2)

        # seed=1
        with multicore.Pool(augseq, processes=2, seed=1) as pool:
            batches_aug2 = pool.map_batches(batches, chunksize=1)
        # seed=2
        with multicore.Pool(augseq, processes=2, seed=2) as pool:
            batches_aug3 = pool.map_batches(batches, chunksize=1)

        assert len(batches_aug1) == nb_batches
        assert len(batches_aug2) == nb_batches
        assert len(batches_aug3) == nb_batches

        for b1, b2, b3 in zip(batches_aug1, batches_aug2, batches_aug3):
            # images were augmented
            assert not np.array_equal(b1.images_unaug, b1.images_aug)
            assert not np.array_equal(b2.images_unaug, b2.images_aug)
            assert not np.array_equal(b3.images_unaug, b3.images_aug)

            # original images still the same
            assert np.array_equal(b1.images_unaug, batch.images_unaug)
            assert np.array_equal(b2.images_unaug, batch.images_unaug)
            assert np.array_equal(b3.images_unaug, batch.images_unaug)

            # augmentations for same seed are the same
            assert np.array_equal(b1.images_aug, b2.images_aug)

            # augmentations for different seeds are different
            assert not np.array_equal(b1.images_aug, b3.images_aug)

        # make sure that batches for the two pools with same seed did not
        # repeat within results (only between the results of the two pools)
        for batches_aug in [batches_aug1, batches_aug2, batches_aug3]:
            self._assert_each_augmentation_not_more_than_once(batches_aug)