if __name__ == "__main__": logger = 0 cfg = ConfigManager('./corrects2iquant.cfg') try: configErr = cfg.evaluateCommandLineArgs(sys.argv) dataDir = cfg.parameters['runtime']['datadir'] # start the logging process logParam = cfg.parameters['logging'] logPath = Path(dataDir.joinpath(logParam['logdir'])) if not logPath.exists(): logPath.mkdir(parents=True) logFile = logPath.joinpath(logParam['logfile']) logger = Logger(logFile, logParam['loglevel'], logParam['screenlevel'], False) logger.setLog('corrects2iquant') cfg.log = logger.log hdf5files = [Path(cfg.parameters['runtime']['datadir'], cfg.parameters['runtime']['filename'])] allpeptideratios = {} allsumionratiodata = {} corrects2iquantoblist = [] counter = 0 for hdf5Path in hdf5files: logger.log.info('hdf5Path %s' % hdf5Path) logger.log.info('starting') hdf5corrects2iquant = HDF5Results(hdf5Path) hdf5corrects2iquant.appendOpen() hdf5corrects2iquant.samplename = hdf5Path.name hdf5corrects2iquant.cfg = cfg hdf5corrects2iquant.addConfigParameters(cfg.parameters, 'postMascot', '4 corrects2iquant')
logs = None
# try:
cfg = ConfigManager('./mascotparser.cfg')
ret = cfg.evaluateCommandLineArgs(sys.argv)
try:
    cfg.scalePpmMda()
    dataDir = cfg.parameters['runtime']['datadir']
    logParam = cfg.parameters['logging']
    logPath = Path(dataDir.joinpath(logParam['logdir']))
    if not logPath.exists():
        logPath.mkdir(parents=True)
    logFile = logPath.joinpath(logParam['logfile'])
    logger = Logger(logFile, logParam['loglevel'], logParam['screenlevel'], False)
    logger.setMascotParserLogs()
    # keep the module-level reference in sync so the guarded warning in the
    # generic handler below can actually fire
    logs = logger
    jobcontrol(cfg, logger)
except ExHa.UsageError as useEx:
    ExHa.reformatException(useEx)
    print(useEx.context)
except Exception as genEx:
    ExHa.reformatException(genEx)
    errorFile = Path(cfg.parameters['runtime']['hdf5file']).stem + '.error'
    ExHa.exportError2File(genEx, cfg.parameters['runtime']['datadir'].joinpath(errorFile))
    if logs:
        logs.datlog.warning(ExHa.oneLineRepr(genEx))
    else:
configPath = './mgf.cfg'
cfg = pyMSsafeConfigManager(configPath)
ret = cfg.evaluateCommandLineArgs(sys.argv)
cfg.scalePpmMda()
dataDir = cfg.parameters['runtime']['datadir']
fileFilter = cfg.parameters['runtime']['filefilter']
hcdOnly = cfg.parameters['general']['hcdonly']

logParam = cfg.parameters['logging']
logPath = Path(dataDir.joinpath(logParam['logdir']))
if not logPath.exists():
    logPath.mkdir(parents=True)
logFile = logPath.joinpath(logParam['logfile'])
logger = Logger(logFile, logParam['loglevel'], logParam['screenlevel'], False)
logger.setLog('mgfCreation')

logger.log.info('Analysing: %s' % str(dataDir.joinpath(fileFilter)))
if hcdOnly:
    logger.log.info('Exporting HCD data only')

# for f in dataDir.files(fileFilter):
for f in dataDir.glob(fileFilter):
    if not f.is_file():
        # skip any directories
        continue
    # if f.name[:4] in ['6528', '1814', '2032']: continue
    mgf = mgftools(f)
    logger.log.info('Filename: %s' % f.name)
dataDir = cfg.parameters['runtime']['datadir']
pyMSsafeAppDir = str(Path('./pyMSsafe').resolve())

logParam = cfg.parameters['logging']
logPath = Path(dataDir.joinpath(logParam['logdir']))
if logPath.exists():
    # clear existing logs for this part of the pipeline
    for log in logPath.glob('*.log'):
        if log.name in ['preMascot.log', 'pyMSsafe.log', 'mgfCreation.log']:
            log.unlink()
else:
    logPath.mkdir(parents=True)
logFile = logPath.joinpath(logParam['logfile'])
logger = Logger(logFile, logParam['loglevel'], logParam['screenlevel'])  # , pID)
logger.setLog('preMascot')
logger.log.log(logger.PROCESS, 'started preMascot application')

logger.log.info('Removing previous errors from %s' % str(dataDir))
# clear previous error files
for errorFile in dataDir.glob('*.error'):
    errorFile.unlink()

logger.log.info('Analysing: %s' % str(dataDir))
rawFileList = []
for idx, raw in enumerate(dataDir.glob('*.raw')):
    rawFileList.append((raw, idx + 1))
logger.log.info('Found %i files to process' % len(rawFileList))
if not rawFileList:
    logger.log.critical('Found %i files to process' % len(rawFileList))
confile = installdir / Path('proteininference.cfg')
cfg = ConfigManager(str(confile))
ret = cfg.evaluateCommandLineArgs(sys.argv)
searches = sorted(cfg.parameters['runtime']['filelist'])
dataDir = cfg.parameters['runtime']['datadir']
resultfile = dataDir.joinpath(cfg.parameters['runtime']['resultfilename'])
try:
    # start the logging process
    logParam = cfg.parameters['logging']
    logPath = Path(dataDir.joinpath(logParam['logdir']))
    if not logPath.exists():
        logPath.mkdir(parents=True)
    logFile = logPath.joinpath(logParam['logfile'])
    logger = Logger(logFile, logParam['loglevel'], logParam['screenlevel'], True)
    logger.setProtInferanceLogs()

    if resultfile.exists():
        resultfile.unlink()

    expID = None
    quantMethID = cfg.parameters['runtime']['quantmethod_id']
    if quantMethID:
        quantMethData = QuantMethods().getMethodByID(quantMethID)
        quantSource = quantMethData['source']
    else:
        quantMethData = {}
        quantSource = ''

    hdf5results = HDF5Results(str(resultfile))
class pymssafe:
    def __init__(self, config):
        """
        @brief initialise the pyMSsafe controller
        @param config <object>: configuration object for the application
        """
        self.cfg = config

        dataDir = config.parameters['runtime']['datadir']
        logParam = config.parameters['logging']
        logPath = Path(dataDir.joinpath(logParam['logdir']))
        if not logPath.exists():
            logPath.mkdir(parents=True)
        logFile = logPath.joinpath(logParam['logfile'])
        self.logs = Logger(logFile, logParam['loglevel'], logParam['screenlevel'], False)
        self.logs.setLog('pyMSsafe')

    def run(self, jobObj):
        """
        @brief run the analysis
        @param jobObj <object>: containing all the job data
        """
        # needs to catch exceptions here so that LUX doesn't receive an exception
        hdf = ''
        xraw = ''
        tempFile = ''
        config = self.cfg
        logs = self.logs
        try:
            hdf = ''
            maxspec = jobObj.args['maxspec']
            logs.log.info("Starting PyMSSafe runner")

            # assign objects to the file interfaces
            rawpath = jobObj.srcpth.absolute()
            namebase = rawpath.stem
            hdfpath = jobObj.dstpth.absolute().joinpath(namebase + '.hdf5')
            runname = namebase
            tempFile = jobObj.dstpth.absolute().joinpath(str(config.parameters['runtime']['pid']))
            if tempFile.exists():
                tempFile.unlink()

            logs.log.info('started job: %s' % rawpath.name)
            watch = Stopwatch()

            if not rawpath.exists():
                logs.log.info('could not find file: %s' % str(rawpath))
                logs.log.info('Stopped')
                return {'code': 1, 'error': 'could not find file: %s' % str(rawpath)}

            # get quant information if file is quan type
            quantMeth = self.extractQuant(jobObj.srcpth.name.upper())
            if quantMeth == -1:
                raise ExHa.QuantificationMethodError('Unable to find "%s" quantification method'
                                                     % self.cfg.parameters['runtime']['quant'].upper())
            elif quantMeth == -2:
                raise ExHa.QuantificationMethodError('Unable to find valid quantification method in file name (%s)'
                                                     % jobObj.srcpth.name.upper())

            xraw = XRawFile(str(rawpath))
            if config.parameters['general']['skipscanevents']:
                xraw.skipScanEvents = config.parameters['general']['skipscanevents']

            # opens hdf5 file for writing
            hdf = hdf5Base(hdfpath, True, True)
            hdf.appendOpen()
            self.createHDFtables(hdf)

            # save the config entries
            hdf.appendRows('/rawdata/config', config.convertConfig())

            # create datamanager object
            config.rawfile = jobObj.srcpth
            dataman = Datamanager(xraw, config, logs, hdf, quantMeth, str(tempFile))
            dataman.maxspec = maxspec
            dataman.addLUXdata(jobObj, config.parameters['runtime']['pid'])

            # run the analysis in the datamanager
            ok = dataman.processSpectra(quantMeth, watch, runname)
            if ok['code'] != 0:
                raise ExHa.SpectraProcessingError(ok['error'])

            # run the XIC generation
            logs.log.info('Processing XIC data for %d MS/MS events' % len(dataman.specs))
            ok = dataman.processXICs()
            if ok['code'] != 0:
                raise ExHa.XICprocessingError(ok['error'])
            watch.rec('Processing XIC')

            logs.log.info('Writing HDF5 indexes')
            hdf.indexTable('/rawdata/msmsheader', ['spec_id'])
            hdf.close()
            xraw = ''
            hdf = ''
            watch.stop()
            logs.log.info('job took: %s' % watch.oneLineFormat())
            tempFile.unlink()
        except ExHa.MSdataConsistancyError as msg:
            self.shutdown(hdf, xraw, tempFile)
            logs.log.warning('error: pyMSsafe Data error: %s' % msg)
            return {'code': 2, 'error': 'pyMSsafe Data error: %s' % msg}
        except ExHa.XICprocessingError as msg:
            self.shutdown(hdf, xraw, tempFile)
            logs.log.warning('error: pyMSsafe XIC error: %s' % msg)
            return {'code': 3, 'error': 'pyMSsafe XIC error: %s' % msg}
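# A minimal usage sketch for the controller above. The Job container is
# hypothetical: only the srcpth, dstpth and args attributes that run() reads
# directly are assumed, and the paths are illustrative. cfg is assumed to be
# the ConfigManager-style object used elsewhere in this codebase.
from pathlib import Path


class Job(object):
    # hypothetical job container exposing the attributes run() reads
    def __init__(self, srcpth, dstpth, maxspec=0):
        self.srcpth = Path(srcpth)      # .raw file to process
        self.dstpth = Path(dstpth)      # directory receiving the .hdf5 output
        self.args = dict(maxspec=maxspec)


app = pymssafe(cfg)
result = app.run(Job('./data/sample01.raw', './data'))
# run() returns an error dict on failure and falls through (None) on success
if result and result['code'] != 0:
    print('job failed: %s' % result['error'])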
if __name__ == '__main__':
    configPath = './outputResults.cfg'
    cfg = ConfigManager(configPath)
    ret = cfg.evaluateCommandLineArgs(sys.argv)
    pID = os.getpid()

    dataDir = cfg.parameters['runtime']['datadir']
    logParam = cfg.parameters['logging']
    logPath = Path(dataDir.joinpath(logParam['logdir']))
    if not logPath.exists():
        logPath.mkdir(parents=True)
    logFile = logPath.joinpath(logParam['logfile'])
    logger = Logger(logFile, logParam['loglevel'], logParam['screenlevel'], False)
    logger.setLog('outputResults')
    logger.log.info('started outputResults application')

    filefiltervalue = cfg.parameters['runtime']['filefilter']
    # this construct makes sure that we only try to get outputs from .hdf5 files:
    # if '.hdf5' is already in the filefilter it is removed and re-added, otherwise it is simply added
    filefiltervalue = filefiltervalue.replace('.hdf5', '') + '.hdf5'
    for hdf in dataDir.glob(filefiltervalue):
        outputResults(hdf)
    logger.log.info('finished outputResults')
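# The replace-then-append idiom above is an idempotent way to force exactly
# one '.hdf5' suffix onto the glob pattern. A standalone illustration (the
# function name forceHdf5Suffix is hypothetical):
def forceHdf5Suffix(filefilter):
    # stripping any existing '.hdf5' before re-appending makes the operation
    # idempotent: with or without the suffix, the result is the same pattern
    return filefilter.replace('.hdf5', '') + '.hdf5'


assert forceHdf5Suffix('sample*') == 'sample*.hdf5'
assert forceHdf5Suffix('sample*.hdf5') == 'sample*.hdf5'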
if __name__ == "__main__": logger = 0 cfg = ConfigManager('./proteinquantification.cfg') try: configErr = cfg.evaluateCommandLineArgs(sys.argv) dataDir = cfg.parameters['runtime']['datadir'] # start the logging process logParam = cfg.parameters['logging'] logPath = Path(dataDir.joinpath(logParam['logdir'])) if not logPath.exists(): logPath.mkdir(parents=True) logFile = logPath.joinpath(logParam['logfile']) logger = Logger(logFile, logParam['loglevel'], logParam['screenlevel'], False) logger.setLog('proteinQuantification') cfg.log = logger.log hdf5files = [ Path(cfg.parameters['runtime']['datadir'], cfg.parameters['runtime']['filename']) ] counter = 0 # go through all results.hdf5 files in list: normally we should only have one for merged datasets for hdf5Path in hdf5files: logger.log.info('hdf5Path %s' % hdf5Path) logger.log.info('starting') hdf5quanprot = HDF5Results(hdf5Path) hdf5quanprot.appendOpen()