def load_extractor(self):
    """ Set up on-the-fly face extraction.

    Used when no alignments file is found. Creates the "load", "detect"
    and "align" queues, spawns the dlib-hog detector and the dlib aligner
    each in its own process, and waits for both to signal that they have
    initialized. On success sets ``self.extract_faces`` to ``True``.

    Raises
    ------
    ValueError
        If the detector or aligner process does not signal readiness
        within 10 seconds.

    Notes
    -----
    Fixes typos in user-facing messages: "perfom" -> "perform" and
    "inititalizing" -> "initializing".
    """
    logger.warning("No Alignments file found. Extracting on the fly.")
    logger.warning("NB: This will use the inferior dlib-hog for extraction "
                   "and dlib pose predictor for landmarks. It is recommended "
                   "to perform Extract first for superior results")
    # Unbounded queues chained load -> detect -> align
    for task in ("load", "detect", "align"):
        queue_manager.add_queue(task, maxsize=0)
    detector = PluginLoader.get_detector("dlib_hog")(loglevel=self.args.loglevel)
    aligner = PluginLoader.get_aligner("dlib")(loglevel=self.args.loglevel)
    d_kwargs = {"in_queue": queue_manager.get_queue("load"),
                "out_queue": queue_manager.get_queue("detect")}
    a_kwargs = {"in_queue": queue_manager.get_queue("detect"),
                "out_queue": queue_manager.get_queue("align")}
    d_process = SpawnProcess(detector.run, **d_kwargs)
    d_event = d_process.event
    d_process.start()
    a_process = SpawnProcess(aligner.run, **a_kwargs)
    a_event = a_process.event
    a_process.start()
    # Each spawned process sets its event once initialized; allow 10s each
    d_event.wait(10)
    if not d_event.is_set():
        raise ValueError("Error initializing Detector")
    a_event.wait(10)
    if not a_event.is_set():
        raise ValueError("Error initializing Aligner")
    self.extract_faces = True
def add_queues(self):
    """ Register the conversion queues with the Queue Manager.

    Each queue is created with ``self.queue_size`` as its maximum size and
    is backed by a threading (non-multiprocessing) queue.
    """
    logger.debug("Adding queues. Queue size: %s", self.queue_size)
    queue_names = ("convert_in", "convert_out", "patch")
    for queue_name in queue_names:
        queue_manager.add_queue(queue_name,
                                self.queue_size,
                                multiprocessing_queue=False)
def _add_queues(self):
    """ Add the required processing queues to the Queue Manager.

    Builds one input queue per phase in ``self._flow`` plus a single
    output queue for the final phase. Every queue is capped at
    ``self._queue_size`` items.

    Returns
    -------
    dict
        Mapping of queue name to the queue object registered with the
        Queue Manager.
    """
    task_names = [f"extract{self._instance}_{phase}_in"
                  for phase in self._flow]
    task_names.append(f"extract{self._instance}_{self._final_phase}_out")
    retval = {}
    for name in task_names:
        # Cap the queue size so frames don't stack up in RAM
        queue_manager.add_queue(name, maxsize=self._queue_size)
        retval[name] = queue_manager.get_queue(name)
    logger.debug("Queues: %s", retval)
    return retval
def add_queues(self):
    """ Register the extraction queues with the Queue Manager.

    The "load" and "save" stages are exposed externally as "extract_in"
    and "extract_out". The input queue — and the detect queue when
    detection and alignment run serially — is bounded to 100 items; the
    remaining queues are unbounded.
    """
    rename = {"load": "extract_in", "save": "extract_out"}
    for task in ("load", "detect", "align", "save"):
        # Bound the feeder queues so frames don't pile up in RAM
        bounded = (task == "load"
                   or (not self.is_parallel and task == "detect"))
        queue_manager.add_queue(rename.get(task, task),
                                maxsize=100 if bounded else 0)
def add_queues(self):
    """ Add the required processing queues to the Queue Manager.

    Queue sizes are capped to avoid stacking frames in RAM: the detector
    feed queue always gets the larger cap (64); the aligner feed queue
    only gets it when detection and alignment run serially; everything
    else is capped at 32.

    Returns
    -------
    dict
        Mapping of queue name to the registered queue object.
    """
    retval = {}
    for name in ("extract_detect_in", "extract_align_in", "extract_align_out"):
        enlarged = (name == "extract_detect_in"
                    or (not self.is_parallel and name == "extract_align_in"))
        queue_manager.add_queue(name, maxsize=64 if enlarged else 32)
        retval[name] = queue_manager.get_queue(name)
    logger.debug("Queues: %s", retval)
    return retval
def _add_queues(self):
    """ Add the required processing queues to the Queue Manager.

    Queue sizes are capped to avoid stacking frames in RAM: the detector
    feed queue always gets the larger cap (64), as does the aligner feed
    queue when detection and alignment run serially; otherwise 32.

    Returns
    -------
    dict
        Mapping of queue name to the registered queue object.

    Notes
    -----
    The per-queue size was previously written to ``self._queue_size``,
    clobbering the instance attribute on every loop iteration (always
    leaving it at 32 after the call). A local variable is now used so no
    instance state is mutated as a side effect.
    """
    queues = dict()
    for task in ("extract_detect_in", "extract_align_in", "extract_align_out"):
        # Limit queue size to avoid stacking ram
        size = 32
        if task == "extract_detect_in" or (
                not self._is_parallel and task == "extract_align_in"):
            size = 64
        queue_manager.add_queue(task, maxsize=size)
        queues[task] = queue_manager.get_queue(task)
    logger.debug("Queues: %s", queues)
    return queues
def add_queues(self):
    """ Register the conversion queues with the Queue Manager.

    Each queue is created with ``self.queue_size`` as its maximum size.
    """
    logger.debug("Adding queues. Queue size: %s", self.queue_size)
    queue_names = ("convert_in", "save", "patch")
    for queue_name in queue_names:
        queue_manager.add_queue(queue_name, self.queue_size)
import sys sys.path.append( '/annoroad/data1/bioinfo/PMO/yangmengcheng/Work/MutConfidence-Model/') from lib.queue_manager import queue_manager from lib.multithreading import MultiThread import t queue_manager.add_queue('test') queue = queue_manager.get_queue('test') def rese(): while True: s = queue.get() if s == 'EOF': print('get EOF, terminate!') return else: print(s) def iteration(): for i in range(5): yield i def send(): t1 = MultiThread(rese) t1.start() for i in iteration(): #print(i)
def add_queues(self):
    """ Register the conversion queues with the Queue Manager.

    Each queue is created with ``self.queue_size`` as its maximum size.
    """
    logger.debug("Adding queues. Queue size: %s", self.queue_size)
    queue_names = ("convert_in", "convert_out", "patch")
    for queue_name in queue_names:
        queue_manager.add_queue(queue_name, self.queue_size)