def sigproc(analysisArgs, pathtorawblock, SIGPROC_RESULTS):
    """Run the signal-processing ("Analysis") step for a single block.

    analysisArgs    -- complete Analysis command line to execute (string).
    pathtorawblock  -- path to the raw-data block; only referenced by the
                       disabled ChkDat diagnostic below.
    SIGPROC_RESULTS -- directory that receives analysis_return_code.txt and
                       the bead-density artifacts.

    Side effects: runs external tools through the shell, appends their output
    to ReportLog.html, and writes files into SIGPROC_RESULTS and the CWD.
    Relies on module-level names (printtime, blockprocessing, beadDensityPlot).
    """
    printtime("RUNNING SINGLE BLOCK ANALYSIS")
    # Append Analysis stdout/stderr to the block's HTML report log.
    command = "%s >> ReportLog.html 2>&1" % (analysisArgs)
    printtime("Analysis command: " + command)
    sys.stdout.flush()
    sys.stderr.flush()
    status = subprocess.call(command,shell=True)
    blockprocessing.add_status("Analysis", status)

    # Persist the Analysis exit code so a later composite (multi-block)
    # merge step can aggregate per-block success/failure.
    try:
        os.umask(0002)
        f = open(os.path.join(SIGPROC_RESULTS,"analysis_return_code.txt"), 'w')
        f.write(str(status))
        f.close()
    except:
        traceback.print_exc()

    # NOTE(review): exit code 2 presumably signals a raw-data problem
    # (ChkDat = "check data") — TODO confirm; the diagnostic call itself
    # is currently commented out, so this branch only logs.
    if status == 2:
        printtime("Analysis finished with status '%s'" % status)
        try:
            com = "ChkDat"
            com += " -r %s" % (pathtorawblock)
            # printtime("DEBUG: Calling '%s':" % com)
            # ret = subprocess.call(com,shell=True)
        except:
            traceback.print_exc()

    ########################################################
    # Make Bead Density Plots                              #
    ########################################################
    printtime("Make Bead Density Plots")
    bfmaskPath = os.path.join(SIGPROC_RESULTS,"analysis.bfmask.bin")
    maskpath = os.path.join(SIGPROC_RESULTS,"MaskBead.mask")

    if os.path.isfile(bfmaskPath):
        # BeadmaskParse writes MaskBead.mask into the CWD; it is moved
        # into SIGPROC_RESULTS afterwards.
        com = "BeadmaskParse"
        com += " -m MaskBead"
        com += " %s" % bfmaskPath
        ret = subprocess.call(com,shell=True)
        blockprocessing.add_status("BeadmaskParse", ret)
        try:
            shutil.move('MaskBead.mask', maskpath)
        except:
            printtime("ERROR: MaskBead.mask already moved")
    else:
        printtime("Warning: no analysis.bfmask.bin file exists.")

    if os.path.exists(maskpath):
        try:
            # Makes Bead_density_contour.png
            beadDensityPlot.genHeatmap(maskpath, SIGPROC_RESULTS)
            # os.remove(maskpath)
        except:
            traceback.print_exc()
    else:
        printtime("Warning: no MaskBead.mask file exists.")

    printtime("Finished single block analysis")
def update_bfmask_artifacts(bfmaskPath, bfmaskstatspath, SIGPROC_RESULTS, plot_title):
    """Upload bead-mask statistics to the job server, then render bead plots.

    bfmaskPath      -- path to the merged bead-find mask binary.
    bfmaskstatspath -- bead-mask stats file; made absolute (relative to the
                       CWD) before being handed to the XML-RPC server.
    SIGPROC_RESULTS -- output directory for the heatmap artifacts.
    plot_title      -- title passed through to beadDensityPlot.genHeatmap.

    Both the upload and the plot generation are best-effort: failures are
    logged and swallowed so they never abort the pipeline.
    NOTE(review): this definition is shadowed by a later, plot-only
    definition of the same name in this file.
    """
    # Deferred imports: xmlrpclib / torrentserver are only needed for the
    # metrics upload and may be absent in some environments.
    import xmlrpclib
    from torrentserver import cluster_settings
    print("Starting Upload Analysis Metrics")
    cwd = os.getcwd()
    # The RPC server runs in a different CWD, so the path must be absolute.
    bfmaskstatspath = os.path.join(cwd, bfmaskstatspath)
    print(bfmaskPath)
    print(bfmaskstatspath)
    try:
        jobserver = xmlrpclib.ServerProxy(
            "http://%s:%d" % (cluster_settings.JOBSERVER_HOST, cluster_settings.JOBSERVER_PORT),
            verbose=False, allow_none=True)
        primary_key_file = os.path.join(cwd, 'primary.key')
        result = jobserver.uploadanalysismetrics(bfmaskstatspath, primary_key_file)
        print(result)
        # Fixed log-message typo: was "Compelted Upload Analysis Metrics".
        print("Completed Upload Analysis Metrics")
    except Exception as err:
        print("Error during analysis metrics upload %s" % err)
        traceback.print_exc()

    printtime("Make Bead Density Plots")
    try:
        beadDensityPlot.genHeatmap(bfmaskPath, bfmaskstatspath, SIGPROC_RESULTS, plot_title)
    except IOError as err:
        # Missing or unreadable mask/stats file.
        printtime("Bead Density Plot file error: %s" % err)
    except Exception as err:
        printtime("Bead Density Plot generation failure: %s" % err)
        traceback.print_exc()
def update_bfmask_artifacts(bfmaskPath, bfmaskstatspath, outputdir, plot_title):
    """Render the bead-density heatmap artifacts for a report.

    Delegates to beadDensityPlot.genHeatmap; any failure is logged and
    swallowed so plot generation never aborts the calling pipeline.
    """
    printtime("Make Bead Density Plots")
    try:
        beadDensityPlot.genHeatmap(bfmaskPath, bfmaskstatspath, outputdir, plot_title)
    except IOError as io_err:
        # A missing or unreadable input file: report and continue.
        printtime("Bead Density Plot file error: %s" % io_err)
    except Exception as plot_err:
        # Any other rendering failure: report with traceback and continue.
        printtime("Bead Density Plot generation failure: %s" % plot_err)
        traceback.print_exc()
def runBlock(env):
    """Run the full per-block pipeline: signal processing, basecalling
    post-processing (SFF trimming/barcoding/summaries), TF metrics, plots,
    and alignment.

    env -- dict of run parameters ('prefix', 'libraryKey', 'flowOrder',
           'analysisArgs', 'reverse_primer_dict', 'sfftrim', 'sfftrim_args',
           'barcodeId', 'align_full', ...).

    Stages are gated by the module-level flags runFromRaw / runFromWells /
    runFromSFF, and the function depends on module-level names
    (BASECALLER_RESULTS, SIGPROC_RESULTS, ALIGNMENT_RESULTS, DIR_BC_FILES,
    mycwd, printtime, write_version, ...). Heavy work is delegated to
    external tools invoked through the shell; STATUS accumulates the first
    error seen but the pipeline keeps going (best-effort).
    """
    STATUS = None
    basefolder = 'plugin_out'
    if not os.path.isdir(basefolder):
        os.umask(0000)  # grant write permission to plugin user
        os.mkdir(basefolder)
        os.umask(0002)
    pathprefix = env["prefix"]
    # Canonical per-block output filenames.
    libsff_filename = "rawlib.sff"
    tfsff_filename = "rawtf.sff"
    fastq_filename = "raw.fastq"
    bctrimmed_libsff_filename = "bctrimmed_rawlib.sff"
    fastq_path = os.path.join(BASECALLER_RESULTS, fastq_filename)
    libsff_path = os.path.join(BASECALLER_RESULTS, libsff_filename)
    tfsff_path = os.path.join(BASECALLER_RESULTS, tfsff_filename)
    bctrimmed_libsff_path = os.path.join(BASECALLER_RESULTS,bctrimmed_libsff_filename)
    tfmapperstats_path = os.path.join(BASECALLER_RESULTS,"TFMapper.stats")
    libKeyArg = "--libraryKey=%s" % env["libraryKey"]
    write_version()

    #-------------------------------------------------------------
    # Single Block data processing
    #-------------------------------------------------------------
    if runFromRaw:
        printtime("RUNNING SINGLE BLOCK ANALYSIS")
        command = "%s >> ReportLog.html 2>&1" % (env['analysisArgs'])
        printtime("Analysis command: " + command)
        sys.stdout.flush()
        sys.stderr.flush()
        status = subprocess.call(command,shell=True)
        #status = 2
        # Map known Analysis exit codes to human-readable status strings.
        STATUS = None
        if int(status) == 2:
            STATUS = 'Checksum Error'
        elif int(status) == 3:
            STATUS = 'No Live Beads'
        elif int(status) != 0:
            STATUS = 'ERROR'
        if STATUS != None:
            printtime("Analysis finished with status '%s'" % STATUS)
            #TODO - maybe create file
            # uploadMetrics.updateStatus(STATUS)
        #TODO
        '''
        csp = os.path.join(env['pathToRaw'],'checksum_status.txt')
        if not os.path.exists(csp) and not env['skipchecksum'] and STATUS==None:
            try:
                os.umask(0002)
                f = open(csp, 'w')
                f.write(str(status))
                f.close()
            except:
                traceback.print_exc()
        '''
        printtime("Finished single block analysis")
    else:
        printtime('Skipping single block analysis')

    if runFromWells:
        tfKey = "ATCG"  # test-fragment key sequence
        libKey = env['libraryKey']
        floworder = env['flowOrder']
        printtime("Using flow order: %s" % floworder)
        printtime("Using library key: %s" % libKey)

        # When running inside a block_* subdirectory, create symlinks so the
        # downstream tools find files at the paths they expect.
        if "block_" in mycwd:
            # Fix SFFTrim
            basecallerjson = os.path.join(BASECALLER_RESULTS, 'BaseCaller.json')
            r = subprocess.call(["ln", "-s", basecallerjson])
            if r:
                printtime("couldn't create symbolic link")
            # Fix SFFMerge
            r = subprocess.call(["ln", "-s", os.path.join('..', SIGPROC_RESULTS, 'processParameters.txt'), os.path.join(BASECALLER_RESULTS, 'processParameters.txt')])
            if r:
                printtime("couldn't create symbolic link")

        sys.stdout.flush()
        sys.stderr.flush()

        if not os.path.exists(libsff_path):
            printtime("ERROR: %s does not exist" % libsff_path)
            # badblock.txt marks this block as failed for the merge step.
            open('badblock.txt', 'w').close()

        ##################################################
        # Unfiltered SFF
        ##################################################
        unfiltered_dir = "unfiltered"
        if os.path.exists(unfiltered_dir):
            top_dir = os.getcwd()
            #change to the unfiltered dir
            os.chdir(os.path.join(top_dir,unfiltered_dir))
            #grab the first file named untrimmed.sff
            # NOTE(review): if no match exists, untrimmed_sff stays unbound
            # and the .replace below raises NameError.
            try:
                untrimmed_sff = glob.glob("*.untrimmed.sff")[0]
            except IndexError:
                printtime("Error, unable to find the untrimmed sff file")
            #rename untrimmed to trimmed
            trimmed_sff = untrimmed_sff.replace("untrimmed.sff","trimmed.sff")
            # 3' adapter details
            qual_cutoff = env['reverse_primer_dict']['qual_cutoff']
            qual_window = env['reverse_primer_dict']['qual_window']
            adapter_cutoff = env['reverse_primer_dict']['adapter_cutoff']
            adapter = env['reverse_primer_dict']['sequence']
            # If flow order is missing, assume classic flow order:
            if floworder == "0":
                floworder = "TACG"
                printtime("warning: floworder redefine required. set to TACG")

            printtime("Unfiltered SFFTrim")
            try:
                com = "SFFTrim"
                com += " --in-sff %s --out-sff %s" % (untrimmed_sff,trimmed_sff)
                com += " --flow-order %s" % (floworder)
                com += " --key %s" % (libKey)
                com += " --qual-cutoff %s" % (qual_cutoff)
                com += " --qual-window-size %s" % (qual_window)
                com += " --adapter-cutoff %s" % (adapter_cutoff)
                com += " --adapter %s" % (adapter)
                com += " --min-read-len 5"
                printtime("DEBUG: Calling '%s'" % com)
                ret = subprocess.call(com,shell=True)
                if int(ret)!=0 and STATUS==None:
                    STATUS='ERROR'
            except:
                printtime('Failed Unfiltered SFFTrim')

            # Convert every SFF in the unfiltered dir to fastq.
            sffs = glob.glob("*.sff")
            for sff in sffs:
                try:
                    com = "SFFRead"
                    com += " -q %s" % sff.replace(".sff",".fastq")
                    com += " %s" % sff
                    printtime("DEBUG: Calling '%s'" % com)
                    ret = subprocess.call(com,shell=True)
                    if int(ret)!=0 and STATUS==None:
                        STATUS='ERROR'
                except:
                    printtime('Failed to convert SFF' + str(sff) + ' to fastq')

            #trim status
            # Align both the untrimmed and trimmed variants, each in its
            # own subdirectory of unfiltered/.
            for status in ["untrimmed","trimmed"]:
                os.chdir(os.path.join(top_dir,unfiltered_dir))
                if not os.path.exists(status):
                    os.makedirs(status)
                os.chdir(os.path.join(top_dir,unfiltered_dir,status))
                try:
                    printtime("Trim Status",)
                    align_full_chip_core("../*." + status + ".sff", libKey, tfKey, floworder, fastq_path, env['align_full'], -1, False, False, True, DIR_BC_FILES, env, ALIGNMENT_RESULTS)
                except OSError:
                    printtime('Trim Status Alignment Failed to start')
                    alignError = open("alignment.error", "w")
                    alignError.write(str(traceback.format_exc()))
                    alignError.close()
                    traceback.print_exc()
            os.chdir(top_dir)
        else:
            printtime("Directory unfiltered does not exist")

        sys.stdout.flush()
        sys.stderr.flush()

        ##################################################
        # Trim the SFF file if it has been requested     #
        ##################################################
        #only trim if SFF is false
        if not env['sfftrim']:
            printtime("Attempting to trim the SFF file")
            if not os.path.exists(libsff_path):
                printtime("ERROR: %s does not exist" % libsff_path)
            (head,tail) = os.path.split(libsff_path)
            libsff_trimmed_path = os.path.join(head,tail[:4] + "trimmed.sff")
            #we will always need the input and output files
            trimArgs = "--in-sff %s --out-sff %s" % (libsff_path,libsff_trimmed_path)
            qual_cutoff = env['reverse_primer_dict']['qual_cutoff']
            qual_window = env['reverse_primer_dict']['qual_window']
            adapter_cutoff = env['reverse_primer_dict']['adapter_cutoff']
            adapter = env['reverse_primer_dict']['sequence']
            if not env['sfftrim_args']:
                printtime("no args found, using default args")
                trimArgs += " --flow-order %s --key %s" % (floworder, libKey)
                trimArgs += " --qual-cutoff %d --qual-window-size %d --adapter-cutoff %d --adapter %s" % (qual_cutoff,qual_window,adapter_cutoff,adapter)
                trimArgs += " --min-read-len 5 "
            else:
                printtime("using non default args" , env['sfftrim_args'])
                trimArgs += " " + env['sfftrim_args']
            try:
                com = "SFFTrim %s " % (trimArgs)
                printtime("DEBUG: call '%s':" % com)
                ret = subprocess.call(com,shell=True)
                if int(ret)!=0 and STATUS==None:
                    STATUS='ERROR'
            except:
                printtime('Failed SFFTrim')
            #if the trim did not fail then move the untrimmed file to untrimmed.expname.sff
            #and move trimmed to expname.sff to ensure backwards compatability
            # don't rename, result will be useless for --fromsff runs
            # if os.path.exists(libsff_path):
            #     try:
            #         os.rename(libsff_path, "untrimmed." + libsff_path) #todo
            #     except:
            #         printtime("ERROR: renaming %s" % libsff_path)
            # if os.path.exists(libsff_trimmed_path):
            #     try:
            #         os.rename(libsff_trimmed_path, libsff_path)
            #     except:
            #         printtime("ERROR: renaming %s" % libsff_trimmed_path)
        else:
            printtime("Not attempting to trim the SFF")

        #####################################################
        # Barcode trim SFF if barcodes have been specified  #
        # Creates one fastq per barcode, plus unknown reads #
        #####################################################
        # NOTE(review): `is not ''` compares identity, not equality; relies
        # on CPython interning of the empty string — should be `!= ''`.
        if env['barcodeId'] is not '':
            try:
                com = "barcodeSplit"
                com += " -s"
                com += " -i %s" % libsff_path
                com += " -b barcodeList.txt"
                com += " -c barcodeMask.bin"
                com += " -f %s" % floworder
                printtime("DEBUG: Calling '%s'" % com)
                ret = subprocess.call(com,shell=True)
                if int(ret) != 0 and STATUS==None:
                    STATUS='ERROR'
                else:
                    # Rename bc trimmed sff
                    if os.path.exists(bctrimmed_libsff_path):
                        os.rename(bctrimmed_libsff_path, libsff_path)
            except:
                printtime("Failed barcodeSplit")

        ##################################################
        # Once we have the new SFF, run SFFSummary
        # to get the predicted quality scores
        ##################################################
        try:
            com = "SFFSummary"
            com += " -o %s" % os.path.join(BASECALLER_RESULTS, 'quality.summary')
            com += " --sff-file %s" % libsff_path
            com += " --read-length 50,100,150"
            com += " --min-length 0,0,0"
            com += " --qual 0,17,20"
            com += " -d %s" % os.path.join(BASECALLER_RESULTS, 'readLen.txt')
            printtime("DEBUG: Calling '%s'" % com)
            ret = subprocess.call(com,shell=True)
            if int(ret)!=0 and STATUS==None:
                STATUS='ERROR'
        except:
            printtime('Failed SFFSummary')

        ##################################################
        #make keypass.fastq file -c(cut key) -k(key flows)#
        ##################################################
        # create analysis progress bar file
        f = open('progress.txt','w')
        f.write('wellfinding = green\n')
        f.write('signalprocessing = green\n')
        f.write('basecalling = green\n')
        f.write('sffread = yellow\n')
        f.write('alignment = grey')
        f.close()

        try:
            com = "SFFRead"
            com += " -q %s" % fastq_path
            com += " %s" % libsff_path
            com += " > %s" % os.path.join(BASECALLER_RESULTS, 'keypass.summary')
            printtime("DEBUG: Calling '%s'" % com)
            ret = subprocess.call(com,shell=True)
            if int(ret)!=0 and STATUS==None:
                STATUS='ERROR'
        except:
            printtime('Failed SFFRead')

        ##################################################
        #generate TF Metrics                             #
        ##################################################
        printtime("Calling TFPipeline.processBlock")
        TFPipeline.processBlock(tfsff_filename, BASECALLER_RESULTS, SIGPROC_RESULTS, tfKey, floworder)
        printtime("Completed TFPipeline.processBlock")

        # Superseded by TFPipeline.processBlock above; kept for reference.
        #printtime("Calling TFMapper")
        #try:
        #    com = "TFMapper"
        #    com += " --logfile TFMapper.log"
        #    com += " --output-dir=%s" % (BASECALLER_RESULTS)
        #    com += " --wells-dir=%s" % (SIGPROC_RESULTS)
        #    com += " --sff-dir=%s" % (BASECALLER_RESULTS)
        #    com += " --tfkey=%s" % (tfKey)
        #    com += " %s" % (tfsff_filename)
        #    com += " ./"
        #    com += " > %s" % (tfmapperstats_path)
        #    printtime("DEBUG: Calling '%s'" % com)
        #    ret = subprocess.call(com,shell=True)
        #    if int(ret)!=0 and STATUS==None:
        #        STATUS='ERROR'
        #except:
        #    printtime("ERROR: TFMapper failed")

        ########################################################
        #generate the TF Metrics including plots               #
        ########################################################
        #printtime("generate the TF Metrics including plots")
        #if os.path.exists(tfmapperstats_path):
        #    try:
        #        # Q17 TF Read Length Plot
        #        tfMetrics = parseTFstats.generateMetricsData(tfmapperstats_path)
        #        tfGraphs.Q17(tfMetrics)
        #        tfGraphs.genCafieIonograms(tfMetrics,floworder)
        #    except Exception:
        #        printtime("ERROR: Metrics Gen Failed")
        #        traceback.print_exc()
        #else:
        #    printtime("ERROR: %s doesn't exist" % tfmapperstats_path)

        ########################################################
        #Generate Raw Data Traces for lib and TF keys          #
        ########################################################
        printtime("Generate Raw Data Traces for lib and TF keys(iontrace_Test_Fragment.png, iontrace_Library.png)")
        tfRawPath = 'avgNukeTrace_%s.txt' % tfKey
        libRawPath = 'avgNukeTrace_%s.txt' % libKey
        peakOut = 'raw_peak_signal'

        if os.path.exists(tfRawPath):
            try:
                kp = plotKey.KeyPlot(tfKey, floworder, 'Test Fragment')
                kp.parse(tfRawPath)
                kp.dump_max(peakOut)
                kp.plot()
            except:
                printtime("TF key graph didn't render")
                traceback.print_exc()

        if os.path.exists(libRawPath):
            try:
                kp = plotKey.KeyPlot(libKey, floworder, 'Library')
                kp.parse(libRawPath)
                kp.dump_max(peakOut)
                kp.plot()
            except:
                printtime("Lib key graph didn't render")
                traceback.print_exc()

        ########################################################
        #Make Bead Density Plots                               #
        ########################################################
        printtime("Make Bead Density Plots")
        bfmaskPath = os.path.join(SIGPROC_RESULTS,"bfmask.bin")
        maskpath = os.path.join(SIGPROC_RESULTS,"MaskBead.mask")

        if os.path.isfile(bfmaskPath):
            # BeadmaskParse writes MaskBead.mask into the CWD.
            com = "BeadmaskParse"
            com += " -m MaskBead"
            com += " %s" % bfmaskPath
            ret = subprocess.call(com,shell=True)
            if int(ret)!=0 and STATUS==None:
                STATUS='ERROR' #TODO
            try:
                shutil.move('MaskBead.mask', maskpath)
            except:
                printtime("ERROR: MaskBead.mask already moved")
        else:
            printtime("Warning: no bfmask.bin file exists.")

        if os.path.exists(maskpath):
            try:
                # Makes Bead_density_contour.png
                beadDensityPlot.genHeatmap(maskpath, BASECALLER_RESULTS)
                # os.remove(maskpath)
            except:
                traceback.print_exc()
        else:
            printtime("Warning: no MaskBead.mask file exists.")

        sys.stdout.flush()
        sys.stderr.flush()

        ########################################################
        # Make per region key incorporation traces             #
        ########################################################
        printtime("Make per region key incorporation traces")
        perRegionTF = "averagedKeyTraces_TF.txt"
        perRegionLib = "averagedKeyTraces_Lib.txt"
        if os.path.exists(perRegionTF):
            pr = plotRawRegions.PerRegionKey(tfKey, floworder,'TFTracePerRegion.png')
            pr.parse(perRegionTF)
            pr.plot()

        if os.path.exists(perRegionLib):
            pr = plotRawRegions.PerRegionKey(libKey, floworder,'LibTracePerRegion.png')
            pr.parse(perRegionLib)
            pr.plot()

        sys.stdout.flush()
        sys.stderr.flush()
    else:
        printtime('Skipping SFF Processing')

    if runFromSFF:
        ########################################################
        #Attempt to align                                      #
        ########################################################
        printtime("Attempt to align")

        # create analysis progress bar file
        f = open('progress.txt','w')
        f.write('wellfinding = green\n')
        f.write('signalprocessing = green\n')
        f.write('basecalling = green\n')
        f.write('sffread = green\n')
        f.write('alignment = yellow')
        f.close()

        try:
            align_full_chip(libsff_path, libKey, tfKey, floworder, fastq_path, env['align_full'], DIR_BC_FILES, env, ALIGNMENT_RESULTS)
        except Exception:
            printtime("ERROR: Alignment Failed")
            traceback.print_exc()

        printtime("make the read length histogram")
        try:
            filepath_readLenHistogram = os.path.join(ALIGNMENT_RESULTS,'readLenHisto.png')
            trimmedReadLenHisto.trimmedReadLenHisto('readLen.txt',filepath_readLenHistogram)
        except:
            printtime("Failed to create %s" % filepath_readLenHistogram)

        ########################################################
        #ParseFiles                                            #
        ########################################################
        printtime('ParseFiles')

        # create analysis progress bar file
        f = open('progress.txt','w')
        f.write('wellfinding = green\n')
        f.write('signalprocessing = green\n')
        f.write('basecalling = green\n')
        f.write('sffread = green\n')
        f.write('alignment = green')
        f.close()
    else:
        printtime('Skipping TMAP Processing')
def mergeSigProcResults(dirs, SIGPROC_RESULTS, plot_title, exclusionMask=''):
    """Merge per-block signal-processing results into composite artifacts.

    dirs            -- list of per-block subdirectory names.
    SIGPROC_RESULTS -- composite output directory (also prefix of each block).
    plot_title      -- title for the composite bead-density heatmap.
    exclusionMask   -- optional exclusion-mask file passed to BeadmaskMerge.

    Writes a composite analysis_return_code.txt, the merged
    analysis.bfmask.bin/.stats, and the heatmap (into "./").
    NOTE(review): this file contains several definitions of this name;
    the last one in the module wins at import time.
    """
    bfmaskPath = os.path.join(SIGPROC_RESULTS,'analysis.bfmask.bin')
    bfmaskstatspath = os.path.join(SIGPROC_RESULTS,'analysis.bfmask.stats')

    ########################################################
    # write composite return code                          #
    ########################################################
    # Start at 96 (presumably a full chip of 96 blocks — TODO confirm) and
    # decrement once per block that reported Analysis=0; 0 means all good.
    try:
        if len(dirs)==96:
            composite_return_code=96
            for subdir in dirs:
                blockstatus_return_code_file = os.path.join(subdir,"blockstatus.txt")
                if os.path.exists(blockstatus_return_code_file):
                    with open(blockstatus_return_code_file, 'r') as f:
                        text = f.read()
                        if 'Analysis=0' in text:
                            composite_return_code-=1

            composite_return_code_file = os.path.join(SIGPROC_RESULTS,"analysis_return_code.txt")
            if not os.path.exists(composite_return_code_file):
                printtime("DEBUG: create %s" % composite_return_code_file)
                os.umask(0002)
                f = open(composite_return_code_file, 'a')
                f.write(str(composite_return_code))
                f.close()
            else:
                printtime("DEBUG: skip generation of %s" % composite_return_code_file)
    except:
        traceback.print_exc()

    ######################################################################
    # Merge individual block bead metrics files and generate bead stats  #
    ######################################################################
    printtime("Merging individual block bead metrics files")
    try:
        # BeadmaskMerge takes the per-block directories as positional args.
        cmd = 'BeadmaskMerge -i analysis.bfmask.bin -o ' + bfmaskPath
        if exclusionMask:
            cmd += ' -e %s' % exclusionMask
        for subdir in dirs:
            subdir = os.path.join(SIGPROC_RESULTS,subdir)
            if isbadblock(subdir, "Merging individual block bead metrics files"):
                continue
            bfmaskbin = os.path.join(subdir,'analysis.bfmask.bin')
            if os.path.exists(bfmaskbin):
                cmd = cmd + ' %s' % subdir
            else:
                printtime("ERROR: skipped %s" % bfmaskbin)
        printtime("DEBUG: Calling '%s'" % cmd)
        subprocess.call(cmd,shell=True)
    except:
        printtime("BeadmaskMerge failed")

    ''' Not needed: BeadmaskMerge will generate analysis.bfmask.stats with exclusion mask applied

    ###############################################
    # Merge individual block bead stats files     #
    ###############################################
    printtime("Merging analysis.bfmask.stats files")

    try:
        bfmaskstatsfiles = []
        for subdir in dirs:
            subdir = os.path.join(SIGPROC_RESULTS,subdir)
            if isbadblock(subdir, "Merging analysis.bfmask.stats files"):
                continue
            bfmaskstats = os.path.join(subdir,'analysis.bfmask.stats')
            if os.path.exists(bfmaskstats):
                bfmaskstatsfiles.append(bfmaskstats)
            else:
                printtime("ERROR: Merging bfmask.stats files: skipped %s" % bfmaskstats)

        StatsMerge.main_merge(bfmaskstatsfiles, bfmaskstatspath, True)
    except:
        printtime("ERROR: No analysis.bfmask.stats files were found to merge")
        traceback.print_exc()
    '''

    ########################################################
    # Make Bead Density Plots                              #
    ########################################################
    printtime("Make Bead Density Plots (composite report)")
    printtime("DEBUG: generate composite heatmap")
    if os.path.exists(bfmaskPath):
        try:
            beadDensityPlot.genHeatmap(bfmaskPath, bfmaskstatspath, "./", plot_title)
        except:
            traceback.print_exc()
    else:
        printtime("Warning: no heatmap generated.")

    printtime("Finished mergeSigProcResults")
def mergeSigProcResults(dirs, SIGPROC_RESULTS, plot_title, exclusionMask=''):
    """Merge per-block bead metrics and render the composite heatmap.

    dirs            -- per-block subdirectory names under SIGPROC_RESULTS.
    SIGPROC_RESULTS -- composite output directory.
    plot_title      -- title for the composite bead-density plot.
    exclusionMask   -- optional exclusion-mask file for BeadmaskMerge.
    """
    merged_bin = os.path.join(SIGPROC_RESULTS, 'analysis.bfmask.bin')
    merged_stats = os.path.join(SIGPROC_RESULTS, 'analysis.bfmask.stats')

    # ---- merge per-block bead metrics via the BeadmaskMerge tool --------
    printtime("Merging individual block bead metrics files")
    try:
        parts = ['BeadmaskMerge', '-i', 'analysis.bfmask.bin', '-o', merged_bin]
        if exclusionMask:
            parts.extend(['-e', exclusionMask])
        for block in dirs:
            block_dir = os.path.join(SIGPROC_RESULTS, block)
            if isbadblock(block_dir, "Merging individual block bead metrics files"):
                continue
            block_bin = os.path.join(block_dir, 'analysis.bfmask.bin')
            if not os.path.exists(block_bin):
                printtime("ERROR: skipped %s" % block_bin)
                continue
            parts.append(block_dir)
        merge_cmd = ' '.join(parts)
        printtime("DEBUG: Calling '%s'" % merge_cmd)
        subprocess.call(merge_cmd, shell=True)
    except:
        printtime("BeadmaskMerge failed")

    # NOTE: merging the per-block analysis.bfmask.stats files is not needed
    # here — BeadmaskMerge already emits analysis.bfmask.stats with the
    # exclusion mask applied.

    # ---- composite bead density heatmap ---------------------------------
    printtime("Make Bead Density Plots (composite report)")
    printtime("DEBUG: generate composite heatmap")
    if os.path.exists(merged_bin):
        try:
            beadDensityPlot.genHeatmap(merged_bin, merged_stats, "./", plot_title)
        except:
            traceback.print_exc()
    else:
        printtime("Warning: no heatmap generated.")

    printtime("Finished mergeSigProcResults")
def mergeSigProcResults(dirs, pathToRaw, skipchecksum, SIGPROC_RESULTS):
    """Legacy merge of per-block signal-processing results (bfmask.bin era).

    dirs         -- per-block subdirectory names under SIGPROC_RESULTS.
    pathToRaw    -- raw-data directory; receives checksum_status.txt.
    skipchecksum -- when true, suppresses checksum_status.txt creation.
    SIGPROC_RESULTS -- composite output directory.

    NOTE(review): this file contains several definitions of this name with
    different signatures; the last one in the module wins at import time.
    """
    #####################################################
    # Grab one of the processParameters.txt files       #
    #####################################################
    # Any block's processParameters.txt will do as the composite template.
    printtime("Merging processParameters.txt")
    for subdir in dirs:
        subdir = os.path.join(SIGPROC_RESULTS,subdir)
        ppfile = os.path.join(subdir,'processParameters.txt')
        printtime(ppfile)
        if os.path.isfile(ppfile):
            processParametersMerge.processParametersMerge(ppfile,True)
            break

    ########################################################
    # write composite return code                          #
    ########################################################
    composite_return_code=0
    for subdir in dirs:
        # The four corner blocks are skipped — presumably excluded chip
        # regions with no live wells. TODO confirm.
        if subdir == "block_X0_Y9331":
            continue
        if subdir == "block_X14168_Y9331":
            continue
        if subdir == "block_X0_Y0":
            continue
        if subdir == "block_X14168_Y0":
            continue
        try:
            f = open(os.path.join(SIGPROC_RESULTS,subdir,"analysis_return_code.txt"), 'r')
            # Only the first character is read — per-block codes are
            # expected to be single-digit.
            analysis_return_code = int(f.read(1))
            f.close()
            if analysis_return_code!=0:
                printtime("DEBUG: errors in %s " % subdir)
                composite_return_code=1
                break
        except:
            traceback.print_exc()

    # Only create checksum_status.txt for a complete 96-block run.
    csp = os.path.join(pathToRaw,'checksum_status.txt')
    if not os.path.exists(csp) and not skipchecksum and len(dirs)==96:
        printtime("DEBUG: create checksum_status.txt")
        try:
            os.umask(0002)
            f = open(csp, 'w')
            f.write(str(composite_return_code))
            f.close()
        except:
            traceback.print_exc()
    else:
        printtime("DEBUG: skip generation of checksum_status.txt")

    #################################################
    # Merge individual block bead metrics files     #
    #################################################
    printtime("Merging individual block bead metrics files")
    try:
        _tmpfile = os.path.join(SIGPROC_RESULTS,'bfmask.bin')
        cmd = 'BeadmaskMerge -i bfmask.bin -o ' + _tmpfile
        for subdir in dirs:
            subdir = os.path.join(SIGPROC_RESULTS,subdir)
            if isbadblock(subdir, "Merging individual block bead metrics files"):
                continue
            bfmaskbin = os.path.join(subdir,'bfmask.bin')
            if os.path.exists(bfmaskbin):
                cmd = cmd + ' %s' % subdir
            else:
                printtime("ERROR: skipped %s" % bfmaskbin)
        printtime("DEBUG: Calling '%s'" % cmd)
        subprocess.call(cmd,shell=True)
    except:
        printtime("BeadmaskMerge failed (test fragments)")

    ###############################################
    # Merge individual block bead stats files     #
    ###############################################
    printtime("Merging bfmask.stats files")
    try:
        bfmaskstatsfiles = []
        for subdir in dirs:
            subdir = os.path.join(SIGPROC_RESULTS,subdir)
            if isbadblock(subdir, "Merging bfmask.stats files"):
                continue
            bfmaskstats = os.path.join(subdir,'bfmask.stats')
            if os.path.exists(bfmaskstats):
                # NOTE(review): the directory (subdir), not the stats file
                # itself, is appended here — unlike the newer merge
                # variants in this file. Verify StatsMerge expects dirs.
                bfmaskstatsfiles.append(subdir)
            else:
                printtime("ERROR: Merging bfmask.stats files: skipped %s" % bfmaskstats)

        StatsMerge.main_merge(bfmaskstatsfiles, True)
        #TODO
        shutil.move('bfmask.stats', SIGPROC_RESULTS)
    except:
        printtime("No bfmask.stats files were found to merge")

    ###############################################
    # Merge individual block MaskBead files       #
    ###############################################
    # printtime("Merging MaskBead.mask files")
    #
    # try:
    #     bfmaskfolders = []
    #     for subdir in dirs:
    #         subdir = os.path.join(SIGPROC_RESULTS,subdir)
    #         printtime("DEBUG: %s:" % subdir)
    #
    #         if isbadblock(subdir, "Merging MaskBead.mask files"):
    #             continue
    #
    #         bfmaskbead = os.path.join(subdir,'MaskBead.mask')
    #         if not os.path.exists(bfmaskbead):
    #             printtime("ERROR: Merging MaskBead.mask files: skipped %s" % bfmaskbead)
    #             continue
    #
    #         bfmaskfolders.append(subdir)
    #
    #     offset_str = "use_blocks"
    #     MaskMerge.main_merge('MaskBead.mask', bfmaskfolders, merged_bead_mask_path, True, offset_str)
    # except:
    #     printtime("Merging MaskBead.mask files failed")

    ########################################################
    # Make Bead Density Plots                              #
    ########################################################
    printtime("Make Bead Density Plots (composite report)")
    bfmaskPath = os.path.join(SIGPROC_RESULTS,'bfmask.bin')
    maskpath = os.path.join(SIGPROC_RESULTS,'MaskBead.mask')

    # skip if merged MaskBead.mask exists TODO
    printtime("generate MaskBead.mask")
    if os.path.isfile(bfmaskPath):
        # BeadmaskParse writes MaskBead.mask into the CWD.
        com = "BeadmaskParse -m MaskBead %s" % bfmaskPath
        os.system(com)
        #TODO
        try:
            shutil.move('MaskBead.mask', maskpath)
        except:
            printtime("ERROR: MaskBead.mask already moved")
    else:
        printtime("Warning: %s doesn't exists." % bfmaskPath)

    printtime("generate graph")
    if os.path.exists(maskpath):
        try:
            # Makes Bead_density_contour.png
            beadDensityPlot.genHeatmap(maskpath, SIGPROC_RESULTS)
            # todo, takes too much time
            # os.remove(maskpath)
        except:
            traceback.print_exc()
    else:
        printtime("Warning: no MaskBead.mask file exists.")
def mergeSigProcResults(dirs, SIGPROC_RESULTS, plot_title):
    """Merge per-block signal-processing results into composite artifacts.

    dirs            -- per-block subdirectory names.
    SIGPROC_RESULTS -- composite output directory (also prefix of blocks).
    plot_title      -- title for the composite bead-density heatmap.

    Writes the composite analysis_return_code.txt, merged
    analysis.bfmask.bin/.stats, the heatmap, and a merged raw_peak_signal.
    NOTE(review): this file contains several definitions of this name;
    the last one in the module wins at import time.
    """
    bfmaskPath = os.path.join(SIGPROC_RESULTS,'analysis.bfmask.bin')
    bfmaskstatspath = os.path.join(SIGPROC_RESULTS,'analysis.bfmask.stats')

    ########################################################
    # write composite return code                          #
    ########################################################
    # Start at 96 (presumably a full chip of 96 blocks — TODO confirm) and
    # decrement once per block whose blockstatus.txt reports Analysis=0;
    # a final value of 0 means every block succeeded.
    try:
        if len(dirs)==96:
            composite_return_code=96
            for subdir in dirs:
                blockstatus_return_code_file = os.path.join(subdir,"blockstatus.txt")
                if os.path.exists(blockstatus_return_code_file):
                    with open(blockstatus_return_code_file, 'r') as f:
                        text = f.read()
                        if 'Analysis=0' in text:
                            composite_return_code-=1

            composite_return_code_file = os.path.join(SIGPROC_RESULTS,"analysis_return_code.txt")
            if not os.path.exists(composite_return_code_file):
                printtime("DEBUG: create %s" % composite_return_code_file)
                os.umask(0002)
                f = open(composite_return_code_file, 'a')
                f.write(str(composite_return_code))
                f.close()
            else:
                printtime("DEBUG: skip generation of %s" % composite_return_code_file)
    except:
        traceback.print_exc()

    #################################################
    # Merge individual block bead metrics files     #
    #################################################
    printtime("Merging individual block bead metrics files")
    try:
        # BeadmaskMerge takes the per-block directories as positional args.
        cmd = 'BeadmaskMerge -i analysis.bfmask.bin -o ' + bfmaskPath
        for subdir in dirs:
            subdir = os.path.join(SIGPROC_RESULTS,subdir)
            if isbadblock(subdir, "Merging individual block bead metrics files"):
                continue
            bfmaskbin = os.path.join(subdir,'analysis.bfmask.bin')
            if os.path.exists(bfmaskbin):
                cmd = cmd + ' %s' % subdir
            else:
                printtime("ERROR: skipped %s" % bfmaskbin)
        printtime("DEBUG: Calling '%s'" % cmd)
        subprocess.call(cmd,shell=True)
    except:
        printtime("BeadmaskMerge failed")

    ###############################################
    # Merge individual block bead stats files     #
    ###############################################
    printtime("Merging analysis.bfmask.stats files")
    try:
        bfmaskstatsfiles = []
        for subdir in dirs:
            subdir = os.path.join(SIGPROC_RESULTS,subdir)
            if isbadblock(subdir, "Merging analysis.bfmask.stats files"):
                continue
            bfmaskstats = os.path.join(subdir,'analysis.bfmask.stats')
            if os.path.exists(bfmaskstats):
                bfmaskstatsfiles.append(bfmaskstats)
            else:
                printtime("ERROR: Merging bfmask.stats files: skipped %s" % bfmaskstats)

        StatsMerge.main_merge(bfmaskstatsfiles, bfmaskstatspath, True)
    except:
        printtime("ERROR: No analysis.bfmask.stats files were found to merge")
        traceback.print_exc()

    ########################################################
    # Make Bead Density Plots                              #
    ########################################################
    printtime("Make Bead Density Plots (composite report)")
    printtime("DEBUG: generate composite heatmap")
    if os.path.exists(bfmaskPath):
        try:
            # Makes Bead_density_contour.png, TODO have to read multiple blocks
            beadDensityPlot.genHeatmap(bfmaskPath, bfmaskstatspath, SIGPROC_RESULTS, plot_title)
        except:
            traceback.print_exc()
    else:
        printtime("Warning: no heatmap generated.")

    ###############################################
    # Merge raw_peak_signal files                 #
    ###############################################
    printtime("Merging raw_peak_signal files")
    try:
        raw_peak_signal_files = []
        for subdir in dirs:
            printtime("DEBUG: %s:" % subdir)
            if isbadblock(subdir, "Merging raw_peak_signal files"):
                continue
            raw_peak_signal_file = os.path.join(subdir,'raw_peak_signal')
            if os.path.exists(raw_peak_signal_file):
                raw_peak_signal_files.append(raw_peak_signal_file)
            else:
                printtime("ERROR: Merging raw_peak_signal files: skipped %s" % raw_peak_signal_file)

        composite_raw_peak_signal_file = "raw_peak_signal"
        blockprocessing.merge_raw_key_signals(raw_peak_signal_files, composite_raw_peak_signal_file)
    except:
        printtime("Merging raw_peak_signal files failed")

    printtime("Finished sigproc merging")