def __init__(self, batch_loader, augseq, queue_size=50, nb_workers="auto"):
    """Set up background augmentation workers.

    Pulls batches from *batch_loader* (either a multiprocessing queue or
    an object exposing a ``.queue`` attribute), augments them with
    *augseq* in daemon processes and pushes results into an output queue
    of size *queue_size*. With ``nb_workers="auto"``, one worker per CPU
    core is started, minus one core reserved for the main process.
    """
    ia.do_assert(queue_size > 0)
    self.augseq = augseq
    # Accept either a raw queue or a loader object wrapping one.
    if isinstance(batch_loader, multiprocessing.queues.Queue):
        self.queue_source = batch_loader
    else:
        self.queue_source = batch_loader.queue
    self.queue_result = multiprocessing.Queue(queue_size)

    if nb_workers == "auto":
        try:
            nb_workers = multiprocessing.cpu_count()
        except (ImportError, NotImplementedError):
            # cpu_count() may be unavailable on some platforms.
            nb_workers = 1
        # Leave one core free for the main process, but keep at
        # least one worker.
        nb_workers = max(1, nb_workers - 1)
    else:
        ia.do_assert(nb_workers >= 1)
    self.nb_workers = nb_workers

    self.workers = []
    self.nb_workers_finished = 0

    # One deterministic seed per worker, drawn from the global RNG.
    seeds = iarandom.get_global_rng().generate_seeds_(nb_workers)
    for worker_idx in range(nb_workers):
        process = multiprocessing.Process(
            target=self._augment_images_worker,
            args=(augseq, self.queue_source, self.queue_result,
                  seeds[worker_idx]))
        # Daemonize so workers die with the main process.
        process.daemon = True
        process.start()
        self.workers.append(process)
def __init__(self, load_batch_func, queue_size=50, nb_workers=1,
             threaded=True):
    """Start background batch loading.

    *load_batch_func* is executed by *nb_workers* workers — threads when
    *threaded* is true, otherwise daemon processes. Loaded batches flow
    through an internal queue into the public ``self.queue``; each queue
    gets half of *queue_size*. A dedicated main-worker thread shuttles
    batches between the two queues.
    """
    ia.do_assert(
        queue_size >= 2,
        "Queue size for BatchLoader must be at least 2, "
        "got %d." % (queue_size,))
    ia.do_assert(
        nb_workers >= 1,
        "Number of workers for BatchLoader must be at least 1, "
        "got %d" % (nb_workers,))

    # Split the total capacity between the internal and public queues.
    half_size = queue_size // 2
    self._queue_internal = multiprocessing.Queue(half_size)
    self.queue = multiprocessing.Queue(half_size)
    self.join_signal = multiprocessing.Event()
    self.workers = []
    self.threaded = threaded

    # One deterministic seed per worker, drawn from the global RNG.
    seeds = iarandom.get_global_rng().generate_seeds_(nb_workers)
    for idx in range(nb_workers):
        if threaded:
            # Threads share the parent's RNG state, hence no seed.
            new_worker = threading.Thread(
                target=self._load_batches,
                args=(load_batch_func, self._queue_internal,
                      self.join_signal, None))
        else:
            new_worker = multiprocessing.Process(
                target=self._load_batches,
                args=(load_batch_func, self._queue_internal,
                      self.join_signal, seeds[idx]))
        # Daemonize so workers die with the main process.
        new_worker.daemon = True
        new_worker.start()
        self.workers.append(new_worker)

    # Forwarder thread moving batches from the internal queue to the
    # public one.
    self.main_worker_thread = threading.Thread(
        target=self._main_worker, args=())
    self.main_worker_thread.daemon = True
    self.main_worker_thread.start()