Example 1
    def _load_batches(self, load_batch_func, queue_internal, join_signal,
                      seedval):
        if seedval is not None:
            random.seed(seedval)
            np.random.seed(seedval)
            ia.seed(seedval)

        try:
            gen = load_batch_func() if not ia.is_generator(load_batch_func) else load_batch_func
            for batch in gen:
                ia.do_assert(
                    isinstance(batch, Batch),
                    "Expected batch returned by load_batch_func to be of class imgaug.Batch, got %s."
                    % (type(batch), ))
                # pickle the batch with the highest available protocol
                batch_pickled = pickle.dumps(batch, protocol=-1)
                # keep retrying to place the pickled batch on the queue while
                # it is full, unless a join was requested in the meantime
                while not join_signal.is_set():
                    try:
                        queue_internal.put(batch_pickled, timeout=0.005)
                        break
                    except QueueFull:
                        pass
                if join_signal.is_set():
                    break
        except Exception:
            traceback.print_exc()
        finally:
            # signal to consumers that this loader has finished (also on error)
            queue_internal.put("")
        time.sleep(0.01)
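
For context, _load_batches expects load_batch_func to be a generator (or a function returning one) that yields imgaug.Batch objects. Below is a minimal sketch of such a function, assuming the top-level alias ia.Batch that recent imgaug versions expose; array shapes and the number of batches are purely illustrative:

import numpy as np
import imgaug as ia

def load_batches():
    # Yield imgaug.Batch objects for _load_batches() to pickle and enqueue.
    # Shapes and the batch count are illustration values only.
    for _ in range(40):
        images = np.random.randint(0, 256, size=(16, 64, 64, 3), dtype=np.uint8)
        yield ia.Batch(images=images)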
Example 2
    def _augment_images_worker(self, augseq, queue_source, queue_result, seedval):
        """
        Augment endlessly images in the source queue.

        This is a worker function for that endlessly queries the source queue (input batches),
        augments batches in it and sends the result to the output queue.

        """
        np.random.seed(seedval)
        random.seed(seedval)
        augseq.reseed(seedval)
        ia.seed(seedval)

        loader_finished = False

        while not loader_finished:
            # wait for a new batch in the source queue and load it
            try:
                batch_str = queue_source.get(timeout=0.1)
                batch = pickle.loads(batch_str)
                if batch is None:
                    loader_finished = True
                    # put it back in so that other workers know that the loading queue is finished
                    queue_source.put(pickle.dumps(None, protocol=-1))
                else:
                    batch_aug = augseq.augment_batch(batch)

                    # send augmented batch to output queue
                    batch_str = pickle.dumps(batch_aug, protocol=-1)
                    queue_result.put(batch_str)
            except QueueEmpty:
                time.sleep(0.01)

        queue_result.put(pickle.dumps(None, protocol=-1))
        time.sleep(0.01)
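
The loader above and this augmentation worker are typically connected through queues by imgaug's BatchLoader and BackgroundAugmenter helpers. The following is a hedged usage sketch using the class names from older imgaug releases (newer releases deprecate them in favour of multicore.Pool); load_batches is the illustrative generator sketched after Example 1:

import imgaug as ia
import imgaug.augmenters as iaa

augseq = iaa.Fliplr(0.5)  # any augmentation sequence

batch_loader = ia.BatchLoader(load_batches)                  # runs _load_batches() in the background
bg_augmenter = ia.BackgroundAugmenter(batch_loader, augseq)  # runs the augmentation workers

while True:
    batch_aug = bg_augmenter.get_batch()  # None once every worker has sent its None sentinel
    if batch_aug is None:
        break
    # ... consume batch_aug.images_aug here ...

batch_loader.terminate()
bg_augmenter.terminate()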
Example 3
def _Pool_worker(batch_idx, batch):
    assert ia.is_single_integer(batch_idx)
    assert isinstance(batch, (UnnormalizedBatch, Batch))
    assert Pool._WORKER_AUGSEQ is not None
    aug = Pool._WORKER_AUGSEQ
    if Pool._WORKER_SEED_START is not None:
        seed = Pool._WORKER_SEED_START + batch_idx
        # derive two seeds within [SEED_MIN_VALUE, SEED_MAX_VALUE]: one for the
        # global RNG, one for the augmentation sequence applied to this batch
        seed_global = ia.SEED_MIN_VALUE + (seed - 10**9) % (ia.SEED_MAX_VALUE - ia.SEED_MIN_VALUE)
        seed_local = ia.SEED_MIN_VALUE + seed % (ia.SEED_MAX_VALUE - ia.SEED_MIN_VALUE)
        ia.seed(seed_global)
        aug.reseed(seed_local)
    result = aug.augment_batch(batch)
    return result
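
_Pool_worker's use of Pool._WORKER_AUGSEQ and Pool._WORKER_SEED_START suggests it runs inside imgaug's multicore.Pool. A usage sketch of the user-facing side follows; the argument values are illustrative:

import numpy as np
import imgaug as ia
import imgaug.augmenters as iaa
from imgaug import multicore

augseq = iaa.Fliplr(0.5)
batches = [ia.Batch(images=np.zeros((8, 32, 32, 3), dtype=np.uint8))
           for _ in range(10)]

# seed makes the per-batch derivation above deterministic
# (seed = seed_start + batch_idx); processes=-1 leaves one core unused.
with multicore.Pool(augseq, processes=-1, seed=1) as pool:
    batches_aug = pool.map_batches(batches)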
Example 4
def _Pool_initialize_worker(augseq, seed_start):
    if seed_start is None:
        # pylint falsely thinks in older versions that multiprocessing.current_process() was not
        # callable, see https://github.com/PyCQA/pylint/issues/1699
        # pylint: disable=not-callable
        process_name = multiprocessing.current_process().name
        # pylint: enable=not-callable

        # time_ns() exists only in 3.7+
        if sys.version_info[0] == 3 and sys.version_info[1] >= 7:
            seed_offset = time.time_ns()
        else:
            seed_offset = int(time.time() * 10**6) % 10**6
        seed = hash(process_name) + seed_offset
        seed_global = ia.SEED_MIN_VALUE + (seed - 10**9) % (ia.SEED_MAX_VALUE - ia.SEED_MIN_VALUE)
        seed_local = ia.SEED_MIN_VALUE + seed % (ia.SEED_MAX_VALUE - ia.SEED_MIN_VALUE)
        ia.seed(seed_global)
        augseq.reseed(seed_local)
    Pool._WORKER_SEED_START = seed_start
    Pool._WORKER_AUGSEQ = augseq
    Pool._WORKER_AUGSEQ.localize_random_state_()  # not sure if really necessary, but won't hurt either
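
For orientation, an initializer like this is meant to be handed to the standard-library pool, with _Pool_worker as the mapped function. A minimal sketch of that wiring, reusing augseq and batches from the previous sketch (imgaug's Pool class encapsulates something similar; the exact construction may differ):

import multiprocessing

workers = multiprocessing.Pool(
    processes=4,
    initializer=_Pool_initialize_worker,
    initargs=(augseq, 1000))  # seed_start=1000 is illustrative
# enumerate(batches) yields (batch_idx, batch) pairs matching _Pool_worker's signature
results = workers.starmap(_Pool_worker, enumerate(batches))
workers.close()
workers.join()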
Example 5
def _reseed_global_local(base_seed, augseq):
    seed_global = _derive_seed(base_seed, -10**9)
    seed_local = _derive_seed(base_seed)
    ia.seed(seed_global)
    augseq.reseed(seed_local)
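
_derive_seed itself is not shown in these examples. Below is a sketch that is consistent with the wrapping arithmetic in Examples 3 and 4; it is an assumption, not necessarily the library's actual implementation:

import imgaug as ia

def _derive_seed(base_seed, offset=0):
    # Assumed behaviour, mirroring the seed_global/seed_local lines above:
    # map base_seed + offset into [SEED_MIN_VALUE, SEED_MAX_VALUE].
    return ia.SEED_MIN_VALUE + (base_seed + offset) % (ia.SEED_MAX_VALUE - ia.SEED_MIN_VALUE)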