def _multiprocess_files(self):
    """Process ``self._file_list`` in parallel and return the per-file results.

    If a level is configured and its logs are not yet archived, remote files
    are first copied locally (in parallel) and ``self._file_list`` is replaced
    by the successfully-copied paths.  Returns ``None`` when no files remain
    to process, otherwise the list of ``self._process_file`` results.
    """
    # NOTE(review): multiprocessing.Pool has no ``daemon`` attribute — the
    # original ``pool.daemon = True`` lines were no-ops (Pool workers are
    # daemonic already), so they are dropped here.
    # First copy any remote files as needed and create the final file list.
    if (self._user_params.get(LSC.LEVEL, None)
            and not self._are_logs_archived(self._user_params.get(LSC.DATE, None))):
        if (self._optional_params.get(LSC.FORCE_COPY, False)
                or socket.gethostname() != self._get_box_from_level(
                    self._user_params.get(LSC.LEVEL, None))):
            # Bug fix: only build the copy pool when it is actually used.
            # The original created it unconditionally and leaked its worker
            # processes whenever this branch was not taken.
            copy_pool = Pool(processes=self._optional_params[LSC.PROCESSOR_COUNT])
            try:
                file_list = copy_pool.map(self._get_log_file, self._file_list)
            finally:
                copy_pool.close()
                copy_pool.join()
            # Empty strings mark files that could not be copied.
            self._file_list = sorted(filter(lambda x: x != '', file_list))
    LOGGER.debug('Final file list: %s', self._file_list)
    if self._file_list == []:
        LOGGER.error('No files found to process.')
        return None
    work_pool = Pool(processes=self._optional_params[LSC.PROCESSOR_COUNT])
    try:
        results = work_pool.map(self._process_file, self._file_list)
    finally:
        # Always reap the workers, even if _process_file raises in a child.
        work_pool.close()
        work_pool.join()
    return results
def _multiprocess_files(self):
    """Process ``self._file_list`` in parallel and return the per-file results.

    If a level is configured and its logs are not yet archived, remote files
    are first copied locally (in parallel) and ``self._file_list`` is replaced
    by the successfully-copied paths.  Returns ``None`` when no files remain
    to process, otherwise the list of ``self._process_file`` results.
    """
    # NOTE(review): multiprocessing.Pool has no ``daemon`` attribute — the
    # original ``pool.daemon = True`` lines were no-ops (Pool workers are
    # daemonic already), so they are dropped here.
    # First copy any remote files as needed and create the final file list.
    if (self._user_params.get(LSC.LEVEL, None)
            and not self._are_logs_archived(
                self._user_params.get(LSC.DATE, None))):
        if (self._optional_params.get(LSC.FORCE_COPY, False)
                or socket.gethostname() != self._get_box_from_level(
                    self._user_params.get(LSC.LEVEL, None))):
            # Bug fix: only build the copy pool when it is actually used.
            # The original created it unconditionally and leaked its worker
            # processes whenever this branch was not taken.
            copy_pool = Pool(processes=self._optional_params[LSC.PROCESSOR_COUNT])
            try:
                file_list = copy_pool.map(self._get_log_file, self._file_list)
            finally:
                copy_pool.close()
                copy_pool.join()
            # Empty strings mark files that could not be copied.
            self._file_list = sorted(filter(lambda x: x != '', file_list))
    LOGGER.debug('Final file list: %s', self._file_list)
    if self._file_list == []:
        LOGGER.error('No files found to process.')
        return None
    work_pool = Pool(processes=self._optional_params[LSC.PROCESSOR_COUNT])
    try:
        results = work_pool.map(self._process_file, self._file_list)
    finally:
        # Always reap the workers, even if _process_file raises in a child.
        work_pool.close()
        work_pool.join()
    return results
def asyn_load_batch_images(args):
    """Load several image batches concurrently.

    The pool size is read from the first job's parameter dict
    (``args[0][0]["n_pool"]``); each element of *args* is dispatched to
    ``load_batch_wrapper`` in a worker process.  Returns the list of
    per-batch results in input order.
    """
    first_params = args[0][0]
    worker_pool = Pool(first_params["n_pool"])
    worker_pool.daemon = True  # kept from the original; Pool ignores this attribute
    loaded = worker_pool.map(load_batch_wrapper, args)
    worker_pool.close()
    worker_pool.join()
    return loaded
def run_face_extraction(iterable, face_link, video_link):
    # Purpose: for each event record in *iterable*, open the hour-long video
    # file it occurred in, grab ~5 seconds of frames around the event time,
    # and hand (frame, output-path) pairs to ``getRep`` via a process pool.
    # Python 2 code (print statements, cv2.cv constants).
    calibration = 95+27  # seconds subtracted from the event offset — presumably a clock-drift correction; confirm
    processes = args.processes  # NOTE(review): ``args`` is a module-level object (argparse result?) — confirm
    pool = Pool(processes=processes, maxtasksperchild=100)
    pool.daemon = True  # NOTE(review): no-op; multiprocessing.Pool has no ``daemon`` attribute
    skip = True
    # Files already extracted: the first 24 chars of each file name are taken
    # as the event id, so those events can be skipped below.
    files = [filename for filename in os.listdir(face_link)]
    id = [file[:24] for file in files]  # NOTE(review): shadows builtins ``id`` and ``file``
    parsed = set(id)
    for i, item in enumerate(iterable):
        # Fast-forward until this hard-coded resume id is reached.
        if str(item['_id']) == '58dd5bf05c4e323398440e60':
            skip = False
        if skip:
            print 'here', i
            continue
        if item['_id'] in parsed:
            print 'skipping'
            continue
        print 'now here', str(item['_id'])
        # Video files are named "<door>_<MM-DD-YY_HH>.avi" under video_link.
        file_name = datetime.datetime.fromtimestamp(item['time']).strftime("%m-%d-%y_%H")
        door_id = item['door']
        file_name = video_link + door_id + '_' + file_name + '.avi'
        print i, item['door'], datetime.datetime.fromtimestamp(item['time']).strftime('%Y-%m-%d %H:%M:%S')
        print item['_id']
        # Skip events recorded after 22:00 (23:xx hours).
        if int(datetime.datetime.fromtimestamp(item['time']).strftime('%H')) > 22:
            continue
        # Seconds into the hour at which the event happened.
        d = datetime.datetime.fromtimestamp(item['time'])
        newdate = d.replace(minute=0, second=0).strftime('%s')
        event_time = int(item['time']) - int(newdate) - calibration
        if os.path.exists(file_name):
            cap = cv2.VideoCapture(file_name)
            length = int(cap.get(cv2.cv.CV_CAP_PROP_FRAME_COUNT))
            fps = length / 3600  # assumes each file covers exactly one hour — TODO confirm
            # Frame window spanning 2.5 s either side of the event.
            start = (event_time - 2.5) * fps
            end = (event_time + 2.5) * fps
            print length, start, end
            cap.set(1, start)  # property 1 == CV_CAP_PROP_POS_FRAMES (seek)
            photo_id = 1
            frames = []
            paths = []
            for _ in range(int(math.floor(start)), int(math.ceil(end))):
                # print 'here',
                grabbed, frame = cap.read()
                # NOTE(review): frame is appended before the ``grabbed`` check,
                # so a final failed read appends None — verify getRep tolerates it.
                frames.append(frame)
                path = face_link + str(item['_id']) + '_' + str(photo_id) + '.png'
                paths.append(path)
                photo_id += 1
                if not grabbed:
                    print 'not grabbed'
                    break
            # NOTE(review): ``cap`` is never released; relies on GC.
            try:
                pars = zip(frames, paths)
                pool.map(getRep, pars)
            except Exception as e:
                print e
def main():
    """Run one crawler per entry of ``processes`` on a 3-worker pool.

    Blocks until every crawler finishes; Ctrl+C terminates the workers early.
    """
    # NOTE(review): ``pool.daemon = True`` from the original was a no-op
    # (Pool has no such attribute) and has been removed, along with the
    # commented-out SIGINT juggling.
    pool = Pool(processes=3)
    try:
        print('Press Ctrl+C to stop')
        # ``processes`` here is the module-level iterable of crawler jobs,
        # not a worker count — presumably defined elsewhere in this file.
        pool.map(run_crawler, processes)
    except KeyboardInterrupt:
        print("Keyboard interrupt received! Stopping...")
        # Kill workers immediately rather than letting them drain.
        pool.terminate()
        pool.join()
    else:
        # Bug fix: the original never closed the pool on the success path,
        # leaking the worker processes until interpreter exit.
        pool.close()
        pool.join()
def setup_codec(yuv_info, database): signal.signal(signal.SIGTERM, term) line_pool = LineContain(len(option.codec)) line_pool.set_data_type(yuv_info) line_pool.build_group(option.codec) pipe_hm = None queue = multiprocessing.Manager().Queue() process_num = option.Multi_process if option.Multi_process is not 0 else 1 process_pool = Pool(processes=process_num) process_pool.daemon = True # # def signal_handler(sig_num, addtion): # process_pool.terminate() # signal.signal(signal.SIGTERM, signal_handler) for codec_index in option.codec: name = codec_index[2] + '_' + codec_index[4] is_find, pool = database.find_data(yuv_info, name, codec_index[5]) if is_find: line_pool.set_group(pool, name) else: if codec_index[2] == 'HM': codec_index_hm = codec_index pipe_hm = hm_execute(yuv_info, codec_index, line_pool) else: if __name__ == '__main__': process_pool.apply_async(codec_execute, ( yuv_info, codec_index, line_pool, database, queue, )) if pipe_hm is not None: line = pipe_hm.pop_pro_hm() line_pool.add_group_ele('HM_' + codec_index_hm[4], line) database.add_data('HM_' + codec_index_hm[4], line) pipe_hm.clear() process_pool.close() process_pool.join() print '%s codec finish for case %s' % (queue.qsize(), name) for i in range(queue.qsize()): data = queue.get() for j in data: line_pool.add_group_ele(j[0], j[1]) database.add_data(j[0], j[1]) for codec, line in line_pool.group.items(): if len(line) == 0: print >> sys.stderr, '%s is empty, check cmdline first!' % codec line_pool.check_baseline(option.codec) return line_pool
def find_in_files(keyword, files, multiline=False,
                  ignore_case=True, worker_count=WORKER_COUNT):
    """
    @keyword: string
    @files: list of string text file paths
    @ignore_case: boolean
    @multiline: boolean
    @worker_count: int - the number of processes used for parallel searching

    returns a dict where the keys are the path of the files that matched
    and each value is a list of strings (matched sentences)
    """
    if not files:
        return {}
    regex = get_regex(keyword)
    try:
        re.compile(regex)  # validate it before fanning out to workers
    except re.error:
        raise re.error(
            'Invalid regex. Make sure your keywords do not have punctuations.')
    results = {}
    pool = Pool(worker_count)
    try:
        # Fan out one async search per file, then gather in submission order.
        workers = [
            pool.apply_async(find_in_file, (path, regex, multiline, ignore_case))
            for path in files
        ]
        for worker in workers:
            res = worker.get()
            if res is not None:
                path, val = res
                results[path] = val
    finally:
        # Bug fix: the original only terminated the pool on the success path;
        # an exception raised out of worker.get() leaked the worker processes.
        # terminate() matches the original success-path behavior, and join()
        # actually reaps the children.
        pool.terminate()
        pool.join()
    return results
    # NOTE(review): these four statements are the tail of a plotting function
    # whose ``def`` line is above this chunk; indentation reconstructed.
    # Saves the rendered figure and closes it to free matplotlib memory.
    fig.savefig(os.path.join(path_img_out2, img_name), dpi=200, bbox_inches='tight')
    plt.close(fig)
    print '%s: done' % img_name
    return

if __name__=="__main__":
    # Optional single layer index on the command line; default to all five.
    if len(sys.argv) == 2:
        layers = [int(sys.argv[1])]
    else:
        layers = [5,4,3,2,1]
    tpl_info = template_info()
    p = Pool(8)
    p.daemon = True  # NOTE(review): no-op; multiprocessing.Pool has no ``daemon`` attribute
    num_features = 64
    features = range(64)
    seg_idx = -1
    # Export one image per (layer, harmonic key, feature) combination,
    # serially — see the comment below for why the pool is unused.
    for layer in layers:
        for h_key in tpl_info.harmonic_keys:
            # multiprocessing doesn't work due to matplotlib/GUI/main_thread/etc..
            # args = zip(features, [tpl_info]*num_features, [h_key]*num_features, [layer]*num_features)
            # p.map(export_image, args)
            for feat in features:
                args= (feat, tpl_info, h_key, layer)
                export_image(args)
def BruteCrack(self):
    # Purpose: brute-force an archive password from the GUI settings —
    # reads entries from the Tk widgets, validates them, builds candidate
    # passwords in batches of ``core``, and reports progress into a text box.
    # NOTE(review): several string literals below appear to have been
    # scrubbed (runs of ``'******'``), leaving the affected statements
    # syntactically broken; tokens are preserved as found.
    self.BruteSaveEntry()
    # Normalise Windows paths to forward slashes.
    rar = self.BruteEntryRarSource.get()
    rar = rar.replace('\\', '/')
    unrar = self.BruteEntryUnrar.get()
    unrar = unrar.replace('\\', '/')
    dict = self.BruteEntryDictPath.get()  # NOTE(review): shadows builtin ``dict``
    dict = dict.replace('\\', '/')
    is_rar = self.BruteRadioRarVar.get()
    # Validate every numeric field; bail out silently on bad input.
    ii = self.BruteEntryii.get()
    if self.bl.check_legit_int(ii) == -1:
        return
    ii = int(ii)
    ii0 = self.BruteEntryii0.get()
    if self.bl.check_legit_int(ii0) == -1:
        return
    ii0 = int(ii0)
    fi = self.BruteEntryFeedback.get()  # progress-feedback interval (big loops)
    if self.bl.check_legit_int(fi) == -1:
        return
    fi = int(fi)
    prefix = self.BruteEntryPrefix.get()
    suffix = self.BruteEntrySuffix.get()
    outset = self.BruteEntryOutset.get()  # password to resume from
    core = self.BruteEntryCore.get()  # parallel attempts per big loop
    if self.bl.check_legit_int(core) == -1:
        return
    core = int(core)
    is_custom = self.BruteCheckCustomDictVar.get()
    is_show_info = self.BruteCheckShowInfoVar.get()
    # Existence and format checks on the archive / tool / dict paths.
    if not os.path.isfile(rar):
        messagebox.showerror("Warrning", "_____RAR FILE NOT EXIST_____")
        return
    if not os.path.isfile(unrar):
        messagebox.showerror("Warrning", "_____UnRAR.exe or 7z.exe NOT EXIST_____")
        return
    if is_custom == 1:
        if not os.path.isfile(dict):
            messagebox.showerror("Warrning", "_____DICT FILE NOT EXIST_____")
            return
    if is_custom == 0 and is_rar == '7z':
        messagebox.showerror(
            "Warrning",
            'Due to there is some unknows bugs in "7z.exe"\n\n\"7z" should only be used in custom dict mode'
        )
        return
    # Extension must match the selected archive type (".r" tolerated for rar parts).
    p_info = self.bl.get_path_info(rar)
    if p_info['ext'] != '.rar' and is_rar == 'rar':
        if p_info['ext'] != '.r':
            messagebox.showerror("Warrning", 'Not a "rar" file')
            return
    if p_info['ext'] != '.7z' and is_rar == '7z':
        messagebox.showerror("Warrning", 'Not a "7z" file')
        return
    if p_info['ext'] != '.zip' and is_rar == 'zip':
        messagebox.showerror("Warrning", 'Not a "zip" file')
        return
    dir = rar[0:rar.rfind(r'/') + 1]  # archive's directory; shadows builtin ``dir``
    # Build the candidate alphabet / word list ``d``.
    if is_custom == 1:
        try:
            f = open(dict, 'r', encoding='utf-8')
        except:
            messagebox.showerror("Warrning", "Need a utf-8 dict file")
            return
        d = f.read()
        f.close()
        d = d[1:]  # utf-8 file get a '\u' at first
        d = d.split('\n')
    else:  # is_custom == 0
        d = self.BruteTextUp.get()
        #d = self.bl.ascii
    if d == '':
        messagebox.showerror("Warrning", "_____DICT ERROR_____")
        return
    # Verify the resume point exists in the alphabet / dict.
    if outset != '':
        if is_custom == 0:
            for c in outset:
                if c not in d:
                    messagebox.showerror(
                        "Warrning",
                        'Crack Start From: "' + c + '" not exist in dict')
                    return
        else:
            if outset not in d:
                messagebox.showerror(
                    "Warrning",
                    'Crack Start From: "' + outset + '" not exist in dict')
                return
    else:
        outset = d[0]
        self.BruteEntryOutset.insert(0, outset)
    self.BruteTextDown.delete("1.0", "end")
    pa = outset
    if is_custom == 1:
        ii = len(d)  # one big loop per dictionary word
    for i in range(ii):
        # Fresh pool per big loop; ``core`` candidates tried in parallel.
        p = Pool(core)
        p.daemon = True  # NOTE(review): no-op; Pool has no ``daemon`` attribute
        for i2 in range(core):
            password = pa
            pa, newpara = self.rl.unrar_brute_get_parameter(
                rar=rar,
                dir=dir,
                unrar=unrar,
                dict=d,
                prefix=prefix,
                suffix=suffix,
                outset=pa,
                ii0=ii0,
                is_custom_dict=is_custom,
                is_rar=is_rar)
            if is_custom == 1 and is_rar == 'zip':
                # in custom password mode, zip file use python internal api, so no multi-threading
                if newpara != 0:
                    continue
                else:
                    # NOTE(review): the statements below are corrupted by
                    # scrubbing — the ``'******'`` runs replaced code that
                    # inserted the attempted password and command output.
                    self.BruteTextDown.insert('1.0', '\n')
                    self.BruteTextDown.insert(
                        '1.0', 'Extracting...\nPassword: '******'1.0', '\n')
                    self.BruteTextDown.insert(
                        '1.0', 'Extracting...\nPassword: '******'1.0', '\n')
                    self.BruteTextDown.insert('1.0', r)
                    self.BruteTextDown.insert('1.0', '\n')
                    self.BruteTextDown.insert('1.0', output)
                    self.root.update()
        p.close()
        p.join()
        # Periodic progress report every ``fi`` big loops.
        if (i + 1) % fi == 0:
            self.BruteTextDown.insert('1.0', '\n')
            self.BruteTextDown.insert(
                '1.0',
                datetime.datetime.now().strftime('%Y-%m-%d %H:%M:%S ')
                + 'big loop' + str(i + 1) + ':' + prefix + pa + suffix)
            self.root.update()
        # Cooperative stop flag set by the GUI's stop button — presumably.
        if self.brute_stop == 1:
            self.brute_stop = 0
            return
    # All candidates exhausted: show where a later run could resume.
    self.BruteTextDown.insert('1.0', '\n')
    self.BruteTextDown.insert(
        '1.0',
        'big loop: ' + str(i + 1) + '\n'
        + 'You can fill this in "Crack Start From":' + prefix + pa + suffix)
    # NOTE(review): these statements are the tail of a plotting function whose
    # ``def`` line is above this chunk; indentation reconstructed.
    plt.close(fig)
    print '%s: done' % img_name
    return

if __name__ == "__main__":
    # Optional single layer index on the command line; default to all five.
    if len(sys.argv) == 2:
        layers = [int(sys.argv[1])]
    else:
        layers = [5, 4, 3, 2, 1]
    tpl_info = template_info()
    p = Pool(8)
    p.daemon = True  # NOTE(review): no-op; multiprocessing.Pool has no ``daemon`` attribute
    num_features = 64
    features = range(64)
    seg_idx = -1
    # Export one image per (layer, harmonic key, feature) combination,
    # serially — see the comment below for why the pool is unused.
    for layer in layers:
        for h_key in tpl_info.harmonic_keys:
            # multiprocessing doesn't work due to matplotlib/GUI/main_thread/etc..
            # args = zip(features, [tpl_info]*num_features, [h_key]*num_features, [layer]*num_features)
            # p.map(export_image, args)
            for feat in features:
                args = (feat, tpl_info, h_key, layer)
                export_image(args)
    # NOTE(review): the triple-quoted string below opens a commented-out
    # region that continues past the end of this chunk.
    '''
    for p_inst in tpl_info.percussive_ins: