def validate_run(self, log, info, run_code, out):
    """Validate the search run: refuse a fasta without decoy entries,
    then apply the standard stdout / exit code / pepXML checks."""
    # A search against a fasta lacking DECOY_ sequences is unusable downstream.
    if out.find("No decoys with label DECOY_ were found") != -1:
        raise RuntimeError("No DECOY_s found in fasta. Please use other fasta!")
    validation.check_stdout(log, out)
    validation.check_exitcode(log, run_code)
    validation.check_xml(log, info[Keys.PEPXML])
    return info
def validate_run(self, log, info, exit_code, stdout):
    """Run the generic checks, then require all three library outputs:
    the spectral library, the transition TSV and the TraML file."""
    validation.check_stdout(log, stdout)
    validation.check_exitcode(log, exit_code)
    # plain files first, then the TraML which must also parse as XML
    for key in ('SPLIB', 'TSV'):
        validation.check_file(log, info[key])
    validation.check_xml(log, info['TRAML'])
    return info
def validate_run(self, log, info, exit_code, stdout):
    """Validate the feature alignment (TRIC) run.

    Fails fast when the aligner could not find peptides shared across all
    runs, applies the standard stdout/exitcode/file checks, archives the
    aligner stdout in WORKDIR, and moves the pyprophet .tr transformation
    files into WORKDIR so they survive a workflow rescue.

    Raises:
        RuntimeError: when no shared peptides were found, or when a
            MPROPHET_TSV entry does not have exactly one .tr file beside it.
    """
    # This traceback fragment is TRIC's signature for "no common peptides".
    if 'max_rt_diff = self._stdev_max_rt_per_run * tr_data.getStdev(source, target)' in stdout:
        raise RuntimeError("No peptides found which are shared between all runs. "
                           "Try to increase 'alignment_score'.")
    validation.check_stdout(log, stdout)
    validation.check_exitcode(log, exit_code)
    validation.check_file(log, info['ALIGNMENT_TSV'])
    validation.check_file(log, info['ALIGNMENT_YAML'])

    # Keep the aligner stdout on disk for later inspection.
    out2log = os.path.join(info[Keys.WORKDIR], "feature_alignment.out.txt")
    # bugfix: use a context manager so the handle is closed even on error
    with open(out2log, "w") as f:
        f.write(stdout)
    info["ALIGNER_STDOUT"] = out2log

    # Move out .tr files of pyprophet to be rescue safe
    info["TRAFO_FILES"] = []
    for fil in info["MPROPHET_TSV"]:
        trfile = glob.glob(os.path.dirname(fil) + "/*.tr")
        # bugfix: the old message claimed "More than one .tr file", but zero
        # matches also lands here — report the actual count instead
        if len(trfile) != 1:
            raise RuntimeError("Expected exactly one .tr file for %s, found %d"
                               % (fil, len(trfile)))
        basename = os.path.basename(trfile[0])
        tgt = os.path.join(info['WORKDIR'], basename)
        log.debug("Moved tr file %s into WORKDIR" % basename)
        shutil.move(trfile[0], tgt)
        info["TRAFO_FILES"].append(tgt)
    return info
def validate_run(self, log, info, run_code, out):
    """Standard checks plus detection of centroided input, which LFQ rejects."""
    # OpenMS emits this line when the peaks look centroided rather than profile.
    centroid_hint = "OpenMS peak type estimation indicates that this is not profile data!"
    if any(centroid_hint in line for line in out.splitlines()):
        raise RuntimeError("Found centroid data but LFQ must be run on profile mode data!")
    validation.check_stdout(log, out)
    validation.check_exitcode(log, run_code)
    validation.check_xml(log, info['FEATUREXML'])
    return info
def validate_run(self, log, info, run_code, out):
    """Reject runs whose fasta contained no DECOY_ sequences, then run the
    standard stdout / exit code / pepXML validation."""
    decoy_warning = "No decoys with label DECOY_ were found"
    if decoy_warning in out:
        raise RuntimeError("No DECOY_s found in fasta. Please use other fasta!")
    validation.check_stdout(log, out)
    validation.check_exitcode(log, run_code)
    validation.check_xml(log, info[Keys.PEPXML])
    return info
def validate_run(self, log, info, exit_code, stdout):
    """Validate a comet search run.

    Known failure banners in stdout are translated into RuntimeErrors
    before the generic stdout/exitcode/pepXML checks run.
    """
    no_spectra_banner = "Warning - no spectra searched"
    oom_banner = "CometMemAlloc"
    if no_spectra_banner in stdout:
        raise RuntimeError("No spectra in mzXML!")
    if oom_banner in stdout:
        # print to stdout to reach gUSE rescue functionality. ugly, no?
        print("MemoryError")
        raise RuntimeError("The job run out of RAM!")
    check_stdout(log, stdout)
    check_exitcode(log, exit_code)
    check_xml(log, info[Keys.PEPXML])
    return info
def validate_run(self, log, info, exit_code, stdout):
    """Validate the conversion run and collect md5 sums of the Q_FILES.

    Raises:
        ValueError: when the source mzXML contained MS2 scans without
            precursor information (the converter cannot process those).
    """
    if "ERROR [root] Parsing </precursorMz> failed. scan number" in stdout:
        raise ValueError("the chosen mzXML %s file contains "
                         "MS2 spectra without precursor information" % info.get("MZXML"))
    validation.check_stdout(log, stdout)
    validation.check_exitcode(log, exit_code)
    info["MD5_SUMS"] = []
    for p in info["Q_FILES"]:
        validation.check_file(log, p)
        # bugfix: close the .md5 file deterministically instead of leaking
        # the handle (the old code relied on GC to close it)
        with open(p + ".md5", "r") as md5file:
            md5sum = md5file.read().split(" ")[0].strip()
        info["MD5_SUMS"].append(md5sum)
    return info
def validate_run(self, log, info, exit_code, stdout):
    """Validate the conversion run, then read one md5 sum per Q_FILE.

    Raises:
        ValueError: when the input mzXML had MS2 spectra lacking
            precursor information.
    """
    if "ERROR [root] Parsing </precursorMz> failed. scan number" in stdout:
        raise ValueError("the chosen mzXML %s file contains "
                         "MS2 spectra without precursor information" % info.get("MZXML"))
    validation.check_stdout(log, stdout)
    validation.check_exitcode(log, exit_code)
    info["MD5_SUMS"] = []
    for p in info["Q_FILES"]:
        validation.check_file(log, p)
        # bugfix: the handle was previously opened and never closed;
        # a context manager guarantees closure even if read() fails
        with open(p + ".md5", "r") as fh:
            md5sum = fh.read().split(" ")[0].strip()
        info["MD5_SUMS"].append(md5sum)
    return info
def validate_run(self, log, info, exit_code, stdout):
    """Comet validation; on a bad exit code an extra hint about a possibly
    vanished input file is logged before the exit code check fires."""
    if "Warning - no spectra searched" in stdout:
        raise RuntimeError("No spectra in mzXML!")
    if "CometMemAlloc" in stdout:
        # print to stdout to reach gUSE rescue functionality. ugly, no?
        print("MemoryError")
        raise RuntimeError("The job run out of RAM!")
    check_stdout(log, stdout)
    if exit_code:
        # a vanished input file is the usual culprit for a bad exit code
        log.warn("exit_code is %s", exit_code)
        log.warn("maybe the input file %s does not exist any more. check this !"
                 % info[Keys.MZXML])
    check_exitcode(log, exit_code)
    check_xml(log, info[Keys.PEPXML])
    return info
def validate_run(self, log, info, exit_code, stdout):
    """Validate a comet search run and its pepXML output.

    Raises RuntimeError for known failure banners; a non-zero exit code
    first triggers a logged hint about a possibly missing input file.
    """
    if stdout.find("Warning - no spectra searched") != -1:
        raise RuntimeError("No spectra in mzXML!")
    if stdout.find("CometMemAlloc") != -1:
        # print to stdout to reach gUSE rescue functionality. ugly, no?
        print("MemoryError")
        raise RuntimeError("The job run out of RAM!")
    check_stdout(log, stdout)
    if exit_code:
        log.warn("exit_code is %s", exit_code)
        mzxml = info[Keys.MZXML]
        log.warn("maybe the input file %s does not exist any more. check this !" % mzxml)
    check_exitcode(log, exit_code)
    check_xml(log, info[Keys.PEPXML])
    return info
def validate_run(self, log, info, exit_code, stdout):
    """Validate pyprophet: the filtered d-score TSV is mandatory, any
    statistics files that happen to exist are collected as optional."""
    validation.check_stdout(log, stdout)
    validation.check_exitcode(log, exit_code)
    stem = os.path.splitext(os.path.basename(info['FEATURETSV']))[0]
    base = os.path.join(info[Keys.WORKDIR], stem)
    info['MPROPHET_TSV'] = base + "_with_dscore_filtered.csv"
    validation.check_file(log, info['MPROPHET_TSV'])
    # optional outputs: keep whichever statistics files pyprophet wrote
    suffixes = ["_full_stat.csv", "_scorer.bin", "_weights.txt", "_report.pdf",
                "_dscores_top_target_peaks.txt", "_dscores_top_decoy_peaks.txt"]
    prophet_stats = [base + end for end in suffixes if os.path.exists(base + end)]
    if prophet_stats:
        info['MPROPHET_STATS'] = prophet_stats
    return info
def validate_run(self, log, info, exit_code, stdout):
    """Check the pyprophet run: generic checks, then the filtered d-score
    CSV (required) plus any optional statistics outputs found on disk."""
    validation.check_stdout(log, stdout)
    validation.check_exitcode(log, exit_code)
    feature_stem = os.path.splitext(os.path.basename(info['FEATURETSV']))[0]
    base = os.path.join(info[Keys.WORKDIR], feature_stem)
    info['MPROPHET_TSV'] = base + "_with_dscore_filtered.csv"
    validation.check_file(log, info['MPROPHET_TSV'])
    # gather whichever of the known statistics files actually exist
    candidates = (base + end for end in ("_full_stat.csv", "_scorer.bin",
                                         "_weights.txt", "_report.pdf",
                                         "_dscores_top_target_peaks.txt",
                                         "_dscores_top_decoy_peaks.txt"))
    prophet_stats = list(filter(os.path.exists, candidates))
    if prophet_stats:
        info['MPROPHET_STATS'] = prophet_stats
    return info
def validate_run(self, log, info, exit_code, stdout):
    """Generic checks; the alignment matrix output file must exist."""
    validation.check_stdout(log, stdout)
    validation.check_exitcode(log, exit_code)
    validation.check_file(log, info['ALIGNMENT_MATRIX'])
    return info
def validate_run(self, log, info, exit_code, stdout):
    """Generic checks; the requant TSV is required unless requantification
    was explicitly disabled (DO_CHROMML_REQUANT set to "false")."""
    validation.check_stdout(log, stdout)
    validation.check_exitcode(log, exit_code)
    requant_enabled = info.get('DO_CHROMML_REQUANT', "") != "false"
    if requant_enabled:
        validation.check_file(log, info['REQUANT_TSV'])
    return info
def validate_run(self, log, info, exit_code, stdout):
    """Exit code first, then stdout, then the pepXML output must parse."""
    validation.check_exitcode(log, exit_code)
    validation.check_stdout(log, stdout)
    validation.check_xml(log, info[Keys.PEPXML])
    return info
def validate_run(self, log, info, exit_code, stdout):
    """Only the generic exit code and stdout checks apply for this tool."""
    validation.check_exitcode(log, exit_code)
    validation.check_stdout(log, stdout)
    return info
def validate_run(self, log, info, exit_code, stdout):
    """Standard stdout/exitcode checks plus the alignment matrix file."""
    validation.check_stdout(log, stdout)
    validation.check_exitcode(log, exit_code)
    validation.check_file(log, info['ALIGNMENT_MATRIX'])
    return info
def validate_run(self, log, info, exit_code, stdout):
    """Check exit code and stdout, then require a parseable pepXML."""
    validation.check_exitcode(log, exit_code)
    validation.check_stdout(log, stdout)
    validation.check_xml(log, info[Keys.PEPXML])
    return info