def open_file_worker(input_queue, results_queue):
    """Worker process that opens data files pulled off a queue.

    Consumes fullpath strings from *input_queue* until it receives the
    ``None`` sentinel.  For every fullpath it puts exactly one (possibly
    empty) list of results on *results_queue*, so the producer can drain
    one item per input path.
    """
    for fullpath in iter(input_queue.get, None):
        # Re-read the interpreter list each iteration in case plugins
        # were (re)loaded between files.
        file_interpreters = plugin_manager.file_interpreters
        try:
            results = open_data_file(fullpath, file_interpreters)
        except Exception:
            # Best-effort: report the failure but keep the worker alive,
            # and still emit an (empty) result so the consumer's item
            # count stays correct.  A bare ``except:`` here would also
            # swallow SystemExit/KeyboardInterrupt -- catch Exception.
            results = []
            traceback.print_exc()
        results_queue.put(results)
def open_files(self, fullpaths, **kwargs):
    """Open multiple data files, fanning out to worker processes.

    Returns a flat list of the trial objects created by the file
    interpreters for all of *fullpaths*.

    NOTE(review): **kwargs are forwarded only on the single-file fast
    path; the multiprocess workers call open_data_file without them.
    Confirm callers do not rely on kwargs for multi-file opens.
    """
    file_interpreters = plugin_manager.file_interpreters

    # Nothing to open: avoid spinning up queues/processes for no work.
    if not fullpaths:
        return []

    # Fast path: a single file is opened in-process, so kwargs apply and
    # failures surface locally (best-effort: log and return no trials).
    if len(fullpaths) == 1:
        try:
            return open_data_file(fullpaths[0], file_interpreters, **kwargs)
        except Exception:
            traceback.print_exc()
            return []

    # Never start more workers than there are files to open.
    num_process_workers = min(config_manager.get_num_workers(), len(fullpaths))

    # Load the work queue, then one None sentinel per worker so each
    # worker's iter(get, None) loop terminates.
    input_queue = multiprocessing.Queue()
    for fullpath in fullpaths:
        input_queue.put(fullpath)
    for _ in range(num_process_workers):
        input_queue.put(None)
    results_queue = multiprocessing.Queue()

    # Start the worker processes.
    jobs = []
    for _ in range(num_process_workers):
        job = multiprocessing.Process(target=open_file_worker,
                                      args=(input_queue, results_queue))
        job.start()
        jobs.append(job)

    # Each worker puts exactly one result list per input path, so
    # draining len(fullpaths) items collects everything.  The interpreters
    # return lists of trial objects, hence extend().
    results_list = []
    for _ in range(len(fullpaths)):
        results_list.extend(results_queue.get())

    # Halt this thread until all worker processes have exited.
    for job in jobs:
        job.join()
    return results_list