Exemplo n.º 1
0
    def _load_batches(cls, load_batch_func, queue_internal, join_signal,
                      seedval):
        """Load batches from `load_batch_func` and push them onto the queue.

        Intended to run in a background worker. If `seedval` is given, all
        relevant RNGs are seeded first. Each produced batch is pickled and
        put onto `queue_internal` until the source is exhausted or
        `join_signal` is set. An empty-string sentinel is always enqueued
        last to mark the end of the stream.
        """
        if seedval is not None:
            # Seed python's, numpy's and imgaug's RNGs for reproducibility.
            random.seed(seedval)
            np.random.seed(seedval)
            iarandom.seed(seedval)

        try:
            # `load_batch_func` may already be a generator or a callable
            # that returns one.
            if ia.is_generator(load_batch_func):
                batch_source = load_batch_func
            else:
                batch_source = load_batch_func()

            for batch in batch_source:
                ia.do_assert(
                    isinstance(batch, Batch),
                    "Expected batch returned by load_batch_func to "
                    "be of class imgaug.Batch, got %s." % (type(batch), ))
                payload = pickle.dumps(batch, protocol=-1)

                # Retry the put with a short timeout so that a set
                # join_signal is noticed quickly even when the queue is full.
                while not join_signal.is_set():
                    try:
                        queue_internal.put(payload, timeout=0.005)
                    except QueueFull:
                        continue
                    break

                if join_signal.is_set():
                    break
        except Exception:
            # Worker process: surface the traceback instead of dying silently.
            traceback.print_exc()
        finally:
            # Sentinel signalling "no more batches" to the consumer.
            queue_internal.put("")
        time.sleep(0.01)
Exemplo n.º 2
0
    def _load_batches(self, load_batch_func, queue_internal, join_signal, seedval):
        """Worker loop: generate batches, pickle them and enqueue them.

        If `seedval` is not None, python's, numpy's and imgaug's RNGs are
        seeded first. Each batch yielded by `load_batch_func` (a generator,
        or a callable returning one) is verified to be an imgaug Batch,
        pickled and put onto `queue_internal`. The loop ends once
        `join_signal` is set or the source is exhausted; an empty string is
        always enqueued last as an end-of-stream marker.
        """
        if seedval is not None:
            random.seed(seedval)
            np.random.seed(seedval)
            ia.seed(seedval)

        try:
            source = (load_batch_func
                      if ia.is_generator(load_batch_func)
                      else load_batch_func())
            for batch in source:
                ia.do_assert(
                    isinstance(batch, Batch),
                    "Expected batch returned by load_batch_func to be of class imgaug.Batch, got %s." % (
                        type(batch),))
                pickled = pickle.dumps(batch, protocol=-1)

                # Keep retrying the put with a tiny timeout so a set
                # join_signal is observed quickly even on a full queue.
                enqueued = False
                while not enqueued and not join_signal.is_set():
                    try:
                        queue_internal.put(pickled, timeout=0.005)
                        enqueued = True
                    except QueueFull:
                        pass

                if join_signal.is_set():
                    break
        except Exception:
            # Background worker: print the traceback instead of dying silently.
            traceback.print_exc()
        finally:
            # Empty-string sentinel tells the consumer the stream ended.
            queue_internal.put("")
        time.sleep(0.01)
Exemplo n.º 3
0
    def imap_batches_unordered(self, batches, chunksize=1):
        """
        Augment batches from a generator; output order is not guaranteed.

        Parameters
        ----------
        batches : generator of imgaug.augmentables.batches.Batch
            Generator providing the batches to augment, one batch per
            yield.

        chunksize : None or int, optional
            Rough number of tasks handed to each worker at once. Larger
            values can improve throughput.

        Yields
        ------
        imgaug.augmentables.batches.Batch
            Augmented batch.

        """
        assert ia.is_generator(batches), (
            "Expected to get a generator as 'batches', got type %s. " +
            "Call map_batches() if you use lists.") % (type(batches), )
        # TODO change this to 'yield from' once switched to 3.3+
        result_gen = self.pool.imap_unordered(
            _Pool_starworker,
            self._handle_batch_ids_gen(batches),
            chunksize=chunksize)
        for batch_aug in result_gen:
            yield batch_aug
Exemplo n.º 4
0
    def imap_batches_unordered(self, batches, chunksize=1,
                               output_buffer_size=None):
        """
        Augment batches from a generator; output order is not guaranteed.

        The output-buffer constraint follows the pattern from
        https://stackoverflow.com/a/47058399.

        Parameters
        ----------
        batches : generator of imgaug.augmentables.batches.Batch
            Generator providing the batches to augment, one batch per
            yield.

        chunksize : None or int, optional
            Rough number of tasks handed to each worker at once. Larger
            values can improve throughput.

        output_buffer_size : None or int, optional
            Maximum number of batches allowed *at the same time* in the
            *whole* pipeline, including augmented batches waiting to be
            requested. Once the limit is reached, no further batches are
            pulled from `batches` until an augmented batch is consumed
            (i.e. requested from this method). ``None`` means unlimited.
            For large datasets, set an integer value to avoid filling RAM
            when loading+augmentation outpaces training.

            *New in version 0.3.0.*

        Yields
        ------
        imgaug.augmentables.batches.Batch
            Augmented batch.

        """
        assert ia.is_generator(batches), ("Expected to get a generator as 'batches', got type %s. "
                                          + "Call map_batches() if you use lists.") % (type(batches),)

        # Either None (no limit) or a Semaphore counting remaining capacity.
        semaphore = _create_output_buffer_left(output_buffer_size)

        # TODO change this to 'yield from' once switched to 3.3+
        loading_gen = self._ibuffer_batch_loading(
            self._handle_batch_ids_gen(batches),
            semaphore)
        result_gen = self.pool.imap_unordered(
            _Pool_starworker,
            loading_gen,
            chunksize=chunksize)

        for batch_aug in result_gen:
            yield batch_aug
            # A result was consumed -> allow one more batch into the pipeline.
            if semaphore is not None:
                semaphore.release()
Exemplo n.º 5
0
 def _assert_batches_is_generator(cls, batches):
     """Raise an AssertionError if `batches` is not a generator."""
     message = ("Expected `batches` to be generator, got type %s. Call "
                "map_batches() if you use lists.") % (type(batches), )
     assert ia.is_generator(batches), message