# NOTE(review): this chunk starts mid-file -- the first assignment below is
# the tail of an "else:" branch (recording a block's basecaller job id)
# whose opening lines are outside this view; confirm against the full file.
job_list[block] = {"basecaller": jobid}

# Record each block's alignment job: append the id to the plain-text job
# file and fold it into that block's job_list entry.
for block, jobid in alignment_job_dict.items():
    f.write(jobid + "\n")
    # "in" replaces dict.has_key(), which was removed in Python 3 and is
    # non-idiomatic even on Python 2.
    if block in job_list:
        job_list[block]["alignment"] = jobid
    else:
        job_list[block] = {"alignment": jobid}

# Append the merge-stage job ids to the same plain-text file.
for jobname, jobid in merge_job_dict.items():
    f.write(jobid + "\n")
f.close()

# write more descriptive json list of jobs
with open("job_list.json", "w") as f:
    f.write(json.dumps(job_list, indent=2))

# multilevel plugins preprocessing level
blockprocessing.runplugins(plugins, env, pluginbasefolder, url_root, "pre")

# Watch status of jobs. As they finish remove the job from the list.
pl_started = False
# list() keeps this Python 3-safe (a dict.values() view cannot be mutated
# as jobs are removed below); on Python 2 it is an equivalent copy.
alignment_job_list = list(alignment_job_dict.values())
while len(alignment_job_list) > 0:
    for job in alignment_job_list:
        block = [b for b in blocks if b["jobid"] == job][0]
        # check status of jobid -- best effort: log and keep polling the
        # remaining jobs on any failure.
        try:
            block["status"] = jobserver.jobstatus(block["jobid"])
        except:  # TODO(review): narrow this bare except (also traps SystemExit)
            traceback.print_exc()
            continue
        # NOTE(review): the loop body continues past the end of this chunk.
# NOTE(review): this chunk starts mid-file -- the first assignment below is
# the tail of an "else:" branch (recording a block's basecaller job id)
# whose opening lines are outside this view; confirm against the full file.
job_list[block] = {'basecaller': jobid}

# Record each block's alignment job: append the id to the plain-text job
# file and fold it into that block's job_list entry.
for block, jobid in alignment_job_dict.items():
    f.write(jobid + '\n')
    # "in" replaces dict.has_key(), which was removed in Python 3 and is
    # non-idiomatic even on Python 2.
    if block in job_list:
        job_list[block]['alignment'] = jobid
    else:
        job_list[block] = {'alignment': jobid}

# Append the merge-stage job ids to the same plain-text file.
for jobname, jobid in merge_job_dict.items():
    f.write(jobid + '\n')
f.close()

# write more descriptive json list of jobs
with open('job_list.json', 'w') as f:
    f.write(json.dumps(job_list, indent=2))

# multilevel plugins preprocessing level
blockprocessing.runplugins(plugins, env, pluginbasefolder, url_root, 'pre')

# Watch status of jobs. As they finish remove the job from the list.
pl_started = False
# list() keeps this Python 3-safe (a dict.values() view cannot be mutated
# as jobs are removed below); on Python 2 it is an equivalent copy.
alignment_job_list = list(alignment_job_dict.values())
while len(alignment_job_list) > 0:
    for job in alignment_job_list:
        block = [b for b in blocks if b['jobid'] == job][0]
        # check status of jobid -- best effort: log and keep polling the
        # remaining jobs on any failure.
        try:
            block['status'] = jobserver.jobstatus(block['jobid'])
        except:  # TODO(review): narrow this bare except (also traps SystemExit)
            traceback.print_exc()
            continue
        # NOTE(review): the loop body continues past the end of this chunk.
# NOTE(review): this chunk begins inside a "try:" whose opening statement
# lies outside the visible window; the guard is reconstructed so the
# fragment parses -- confirm against the full file.
try:
    # TODO(review): os.system() on a pre-built command string; prefer
    # subprocess with an argument list if cmd can contain untrusted text.
    os.system(cmd)
except:  # best-effort RSM metrics step -- never abort the analysis run
    printtime("RSM createExperimentMetrics.py failed")

########################################################
# Write checksum_status.txt to raw data directory      #
########################################################
if is_wholechip:
    try:
        if os.path.isfile("analysis_return_code.txt"):
            shutil.copyfile(
                "analysis_return_code.txt",
                os.path.join(env["pathToRaw"], "checksum_status.txt"),
            )
    except:  # copy is advisory only; log and continue
        traceback.print_exc()

# Thumbnail / whole-chip runs get the full plugin pass; per-block runs get
# a reduced, explicitly selected plugin set.
if is_thumbnail or is_wholechip:
    blockprocessing.runplugins(env, basefolder, url_root)
else:
    # Single construction replaces the set()/add()/add() sequence
    # (same contents; set([...]) form kept for very old Python 2 compat).
    plugin_set = set(["torrentscout", "rawPlots"])
    blockprocessing.run_selective_plugins(plugin_set, env, basefolder, url_root)

# Paired-end reverse runs notify the crawler service over XML-RPC.
if env["isReverseRun"] and env["pe_forward"] != "None":
    try:
        crawler = xmlrpclib.ServerProxy(
            "http://%s:%d" % (CRAWLER_HOST, CRAWLER_PORT),
            verbose=False,
            allow_none=True,
        )
    except (socket.error, xmlrpclib.Fault):
        traceback.print_exc()
    # NOTE(review): if ServerProxy raised above, "crawler" is unbound here
    # and this line raises NameError -- pre-existing behavior, flagged only.
    printtime("crawler hostname: " + crawler.hostname())
# NOTE(review): this chunk begins inside an if/else whose condition is
# outside the visible window (zip-only path vs. Proton full chip). The
# guard below is a reconstruction -- confirm against the surrounding file.
if is_thumbnail:
    printtime("Submitted zipping job with job ID (%s)" % str(merge_job_dict['merge']))
else:
    # Proton Full Chip: one merge job that aligns and zips, gated on all
    # per-block jobs (passed as the dependency list) finishing first.
    merge_job_dict['merge'] = spawn_cluster_job(
        '.',
        'MergeTLScript.py',
        ['--do-alignment', '--do-zipping'],
        # list() keeps this Python 3-safe; on Python 2 it is an equivalent copy.
        list(block_job_dict.values()),
    )
    printtime("Submitted merge alignment job with job ID (%s)" % str(merge_job_dict['merge']))

# write job id's to file: the merge stage plus one block_processing entry
# per block.
job_list = {}
job_list['merge'] = merge_job_dict
for block, jobid in block_job_dict.items():
    job_list[block] = {'block_processing': jobid}
with open('job_list.json', 'w') as f:
    f.write(json.dumps(job_list, indent=2))

# multilevel plugins preprocessing level
plugins = blockprocessing.runplugins(plugins, env, RunLevel.PRE, plugins_params)

# Watch status of jobs. As they finish remove the job from the list.
pl_started = False
# list() copy so finished jobs can be removed while polling (a py3 values()
# view cannot be mutated).
block_job_list = list(block_job_dict.values())
while block_job_list:
    for job in block_job_list:
        block = [b for b in blocks if b['jobid'] == job][0]
        # check status of jobid -- best effort: log and keep polling the
        # remaining jobs on any failure.
        try:
            block['status'] = jobserver.jobstatus(block['jobid'])
        except:  # TODO(review): narrow this bare except (also traps SystemExit)
            traceback.print_exc()
            continue
        # NOTE(review): the loop body continues past the end of this chunk.
# NOTE(review): this chunk starts mid-file -- the first assignment below is
# the tail of an "else:" branch (recording a block's basecaller job id)
# whose opening lines are outside this view; confirm against the full file.
job_list[block] = {'basecaller': jobid}

# Record each block's alignment job: append the id to the plain-text job
# file and fold it into that block's job_list entry.
for block, jobid in alignment_job_dict.items():
    f.write(jobid + '\n')
    # "in" replaces dict.has_key(), which was removed in Python 3 and is
    # non-idiomatic even on Python 2.
    if block in job_list:
        job_list[block]['alignment'] = jobid
    else:
        job_list[block] = {'alignment': jobid}

# Append the merge-stage job ids to the same plain-text file.
for jobname, jobid in merge_job_dict.items():
    f.write(jobid + '\n')
f.close()

# write more descriptive json list of jobs
with open('job_list.json', 'w') as f:
    f.write(json.dumps(job_list, indent=2))

# multilevel plugins preprocessing level
blockprocessing.runplugins(plugins, env, pluginbasefolder, url_root, 'pre')

# Watch status of jobs. As they finish remove the job from the list.
pl_started = False
# list() keeps this Python 3-safe (a dict.values() view cannot be mutated
# as jobs are removed below); on Python 2 it is an equivalent copy.
alignment_job_list = list(alignment_job_dict.values())
while len(alignment_job_list) > 0:
    for job in alignment_job_list:
        block = [b for b in blocks if b['jobid'] == job][0]
        # check status of jobid -- best effort: log and keep polling the
        # remaining jobs on any failure.
        try:
            block['status'] = jobserver.jobstatus(block['jobid'])
        except:  # TODO(review): narrow this bare except (also traps SystemExit)
            traceback.print_exc()
            continue
        # NOTE(review): the loop body continues past the end of this chunk.