def HandleChildJobs(j, SsTemp, EmailWhenDone, SsName, SsRTStore, IsFast, CallMode):
    """Resume a job `j` that spawned child jobs: wait for the children, then finish up.

    Reads the pickled per-job metadata file written earlier (named
    TEMPMETAFILE + '_' + j under SsTemp). If the metadata lists child jobs,
    blocks until they complete; any nonzero child exit status forces the
    overall ExitStatus to -1 before FinishUp is called.

    Parameters are passed through to FinishUp; their semantics live there.
    Does nothing if the metadata file does not exist.
    """
    TempMetaFile = os.path.join(SsTemp, TEMPMETAFILE + '_' + j)
    if PathExists(TempMetaFile):
        # BUGFIX: pickle data is binary — text mode ('r') fails under Python 3.
        # Also use a context manager so the handle is always closed.
        with open(TempMetaFile, 'rb') as fh:
            MetaData = pickle.load(fh)
        child_jobs = MetaData.get('child_jobs')
        if child_jobs:
            print('... but found child jobs', child_jobs, 'so now waiting for those...')
            ExitStatus = MetaData['ExitStatus']
            statuses = wait_and_get_statuses(child_jobs)
            # Any failed child poisons the whole job.
            if not all(ces == 0 for ces in statuses):
                ExitStatus = -1
            FinishUp(j, ExitStatus, MetaData['RunOutput'], MetaData['Before'],
                     MetaData['After'], MetaData['Creates'], MetaData['DepListj'],
                     MetaData['OriginalTimes'], MetaData['OrigDirInfo'],
                     MetaData['TempSOIS'], TempMetaFile, CallMode, EmailWhenDone,
                     SsName, SsRTStore, IsFast, child_jobs=child_jobs)
# NOTE(review): this is a byte-for-byte duplicate (modulo whitespace) of the
# HandleChildJobs definition immediately above — rebinding the same name.
# One of the two definitions should be deleted once callers are confirmed.
def HandleChildJobs(j, SsTemp, EmailWhenDone, SsName, SsRTStore, IsFast, CallMode):
    """Resume a job `j` that spawned child jobs: wait for the children, then finish up.

    Reads the pickled per-job metadata file (TEMPMETAFILE + '_' + j under
    SsTemp). If it lists child jobs, blocks until they complete; any nonzero
    child exit status forces the overall ExitStatus to -1 before FinishUp.
    Does nothing if the metadata file does not exist.
    """
    TempMetaFile = os.path.join(SsTemp, TEMPMETAFILE + '_' + j)
    if PathExists(TempMetaFile):
        # BUGFIX: pickle data is binary — text mode ('r') fails under Python 3.
        # Context manager guarantees the file handle is closed.
        with open(TempMetaFile, 'rb') as fh:
            MetaData = pickle.load(fh)
        child_jobs = MetaData.get('child_jobs')
        if child_jobs:
            print('... but found child jobs', child_jobs, 'so now waiting for those...')
            ExitStatus = MetaData['ExitStatus']
            statuses = wait_and_get_statuses(child_jobs)
            # A single failed child marks the whole job as failed.
            if not all(ces == 0 for ces in statuses):
                ExitStatus = -1
            FinishUp(j, ExitStatus, MetaData['RunOutput'], MetaData['Before'],
                     MetaData['After'], MetaData['Creates'], MetaData['DepListj'],
                     MetaData['OriginalTimes'], MetaData['OrigDirInfo'],
                     MetaData['TempSOIS'], TempMetaFile, CallMode, EmailWhenDone,
                     SsName, SsRTStore, IsFast, child_jobs=child_jobs)
def extract_and_evaluate_parallel(outfile, image_certificate_file, model_certificate_file,
                                  cpath, convolve_func_name, task, ext_hash):
    """Fan out feature extraction/evaluation over a grid queue, then aggregate.

    For each (model, task, split) triple, records the split and submits a
    qsub job running extract_and_evaluate_parallel_core; after all jobs
    finish, collects per-split results into the performance collection and
    writes a certificate dict to `outfile`.

    Parameters:
        outfile: path the certificate dict is written to.
        image_certificate_file / model_certificate_file: certificate inputs
            handed to prepare_extract_and_evaluate.
        cpath: unused here (kept for interface compatibility).
        convolve_func_name: 'numpy' (CPU queue) or 'pyfft' (GPU queue) —
            selects the qsub option string.
        task: task spec handed to prepare_extract_and_evaluate.
        ext_hash: experiment hash tagging all submitted work.

    Raises:
        ValueError: if convolve_func_name is neither 'numpy' nor 'pyfft'.
    """
    (model_configs, image_config_gen, model_hash, image_hash, task_list,
     perf_col, split_coll, split_fs, splitperf_coll, splitperf_fs) = \
        prepare_extract_and_evaluate(ext_hash, image_certificate_file,
                                     model_certificate_file, task)

    # BUGFIX: previously an unrecognized convolve_func_name left `opstring`
    # unbound, causing a confusing NameError at the first qsub call. Fail
    # fast with a clear message instead.
    if convolve_func_name == 'numpy':
        opstring = '-l qname=extraction_cpu.q'
    elif convolve_func_name == 'pyfft':
        opstring = '-l qname=extraction_gpu.q -o /home/render -e /home/render'
    else:
        raise ValueError('Unsupported convolve_func_name: %r '
                         "(expected 'numpy' or 'pyfft')" % (convolve_func_name,))

    # Submission pass: one job per (model, task, split).
    # NOTE: the loop variable `task` deliberately shadows the parameter from
    # here on; the parameter was already consumed above.
    jobids = []
    for m in model_configs:
        print('Evaluating model', m)
        for task in task_list:
            print('task', task)
            splits = generate_splits(task, image_hash, 'images')
            for (ind, split) in enumerate(splits):
                put_in_split(split, image_config_gen, m, task, ext_hash, ind, split_fs)
                jobid = qsub(extract_and_evaluate_parallel_core,
                             (image_config_gen, m, task, ext_hash, ind, convolve_func_name),
                             opstring=opstring)
                jobids.append(jobid)
    print(jobids)

    # NOTE(review): statuses is collected but never inspected — failed jobs
    # are silently ignored here; confirm whether aggregation below should
    # abort on nonzero statuses (compare HandleChildJobs).
    statuses = wait_and_get_statuses(jobids)

    # Aggregation pass: gather the per-split results written by the jobs.
    for m in model_configs:
        print('Evaluating model', m)
        for task in task_list:
            split_results = get_most_recent_files(
                splitperf_coll,
                {'__hash__': ext_hash,
                 'task': son_escape(task),
                 'model': m['config']['model'],
                 'images': son_escape(image_config_gen['images'])})
            put_in_performance(split_results, image_config_gen, m, model_hash,
                               image_hash, perf_col, task, ext_hash)
    createCertificateDict(outfile, {'image_file': image_certificate_file,
                                    'models_file': model_certificate_file})