def initialize(self, params):
    """
    Copy all master-supplied parameters onto this slave instance.

    expects::
      {'nice':int, 'ferror':str, .. }

    @param params: initialisation parameters passed from the master
    @type  params: dict
    """
    ## expose every parameter as an instance attribute
    vars(self).update(params)

    ## open the shared error log in append mode
    self.errorLog = LogFile(self.ferror, mode='a')
def initialize(self, params):
    """
    Initialize AlignerSlave.

    @param params: dictionary with init parameters
    @type  params: {param:value}
    """
    ## make each entry an attribute AND keep the raw dict around
    vars(self).update(params)
    self.params = params

    ## error log is opened in append mode so runs accumulate
    self.errorLog = LogFile(self.ferror, mode='a')
def initialize(self, params):
    """
    Initialize AlignerSlave.

    @param params: dictionary with init parameters
    @type  params: {param:value}
    """
    ## expose parameters as attributes and keep the raw dict
    vars(self).update(params)
    self.params = params

    ## Only the PATH must be updated from the master to run properly
    os.environ["PATH"] = params['os.environ']["PATH"]

    ## error log is opened in append mode so runs accumulate
    self.errorLog = LogFile(self.ferror, mode='a')
class AlignerSlave(JobSlave): """ See also: Aligner.py, AlignerMaster.py """ def initialize(self, params): """ Initialize AlignerSlave. @param params: dictionary with init parameters @type params: {param:value} """ self.__dict__.update( params ) self.params = params ## Only the PATH must be updated from the master to run properly os.environ["PATH"]=self.params['os.environ']["PATH"] self.errorLog = LogFile( self.ferror, mode='a' ) def reportError(self, msg, d ): """ Report error. @param msg: error message @type msg: str @param d: error data @type d: any """ try: s = '%s on %s, job %r\n' % (msg, os.uname()[1], d) s += '\nErrorTrace:\n' + T.lastErrorTrace() + '\n' self.errorLog.add( s ) try: print msg except: pass except Exception, why: f = open('ErrorReportError_XRefineSlave','a') f.write( str(why) ) try: f.write( T.lastErrorTrace() ) except: pass f.close()
def go(self, dict): """ Run alignment job. @param dict: dictionary with run parameters @type dict: {param:value} """ d = {} val = None try: T.flushPrint(self.progress_str) for id, val in dict.items(): aligner_log = LogFile('%s/Aligner.log' % val["outFolder"]) d[id] = val aligner_log.add('Slave aligns %s on %s' % (id, os.uname()[1])) a = Aligner(outFolder=val["outFolder"], log=aligner_log) ## For the cross validation if not os.path.exists(val["outFolder"] + TC.F_COFFEE): input_file = val["outFolder"] + VS.F_TCOFFEE alpha_path = self.prepareT_coffee(input_file) a.align_for_modeller_inp( pdbFiles=alpha_path, fasta_templates=val["fastaTemplates"], fasta_sequences=val["fastaSequences"], fasta_target=val["fastaTarget"]) ## For a classic project folder else: a.align_for_modeller_inp( pdbFiles=val["pdbFiles"], fasta_templates=val["fastaTemplates"], fasta_sequences=val["fastaSequences"], fasta_target=val["fastaTarget"]) a.go() except Exception, why: self.reportError('ERROR ' + str(why), val)
def convertOptions(o):
    """
    Translate commandline options where needed.
    You may need to add some entries here if you want to override
    exotic Executor parameters from the commandline.

    @param o: raw commandline options
    @type  o: dict
    @return: the same dict with values converted in place
    @rtype: dict
    """
    o['verbose'] = int(o.get('v', 1))
    del o['v']

    o['outFolder'] = tools.absfile(o.get('o', '.'))
    del o['o']

    o['zfilter'] = float(o.get('zfilter', 0))
    o['idfilter'] = float(o.get('idfilter', 0))

    o['log'] = o.get('log', None)
    if o['log']:
        ## BUGFIX: previously read the already-deleted key o['o'] (KeyError)
        ## and the undefined global 'options' instead of the parameter 'o'
        o['log'] = LogFile(o['outFolder'] + '/' + o['log'], 'a')

    o['debug'] = int(o.get('debug', 0))
    o['nice'] = int(o.get('nice', 0))

    if 'ending_model' in o:
        o['ending_model'] = int(o['ending_model'])
    if 'starting_model' in o:
        o['starting_model'] = int(o['starting_model'])

    return o
def go(self, dict): """ Run Modeller job. @param dict: dictionary with run parameters @type dict: {param:value} """ d = {} val = None try: T.flushPrint(self.params['progress_str']) for id, val in dict.items(): modeller_log = LogFile('%s/Modeller.log' % val["outFolder"]) d[id] = val m = M(outFolder=val["outFolder"], fasta_target=val["fastaTarget"], f_pir=val["f_pir"], template_folder=val["template_folder"], starting_model=val["starting_model"], ending_model=val["ending_model"], log=modeller_log) m.run() except Exception, why: self.reportError('ERROR ' + str(why), val)
def go(self, dict): """ Run alignment job. @param dict: dictionary with run parameters @type dict: {param:value} """ d = {} val = None try: T.flushPrint( self.progress_str ) for id, val in dict.items(): aligner_log = LogFile( '%s/Aligner.log' %val["outFolder"] ) d[id] = val aligner_log.add('Slave aligns %s on %s' % (id,os.uname()[1]) ) a = Aligner( outFolder= val["outFolder"], log=aligner_log) ## For the cross validation if not os.path.exists(val["outFolder"] + TC.F_COFFEE): input_file = val["outFolder"] + VS.F_TCOFFEE alpha_path = self.prepareT_coffee(input_file) a.align_for_modeller_inp( pdbFiles=alpha_path, fasta_templates=val["fastaTemplates"], fasta_sequences=val["fastaSequences"], fasta_target=val["fastaTarget"]) ## For a classic project folder else: a.align_for_modeller_inp(pdbFiles=val["pdbFiles"], fasta_templates=val["fastaTemplates"], fasta_sequences=val["fastaSequences"], fasta_target=val["fastaTarget"]) a.go() except Exception, why: self.reportError( 'ERROR '+str(why), val )
class AlignerSlave(JobSlave): """ See also: Aligner.py, AlignerMaster.py """ def initialize(self, params): """ Initialize AlignerSlave. @param params: dictionary with init parameters @type params: {param:value} """ self.__dict__.update(params) self.params = params ## Only the PATH must be updated from the master to run properly os.environ["PATH"] = self.params['os.environ']["PATH"] self.errorLog = LogFile(self.ferror, mode='a') def reportError(self, msg, d): """ Report error. @param msg: error message @type msg: str @param d: error data @type d: any """ try: s = '%s on %s, job %r\n' % (msg, os.uname()[1], d) s += '\nErrorTrace:\n' + T.lastErrorTrace() + '\n' self.errorLog.add(s) try: print msg except: pass except Exception, why: f = open('ErrorReportError_XRefineSlave', 'a') f.write(str(why)) try: f.write(T.lastErrorTrace()) except: pass f.close()
class ModelSlave(JobSlave): def initialize(self, params): """ Initialize AlignerSlave. @param params: dictionary with init parameters @type params: {param:value} """ self.__dict__.update( params ) self.params = params self.errorLog = LogFile( self.ferror, mode='a' ) def reportError(self, msg, d ): """ Report error. @param msg: error message @type msg: str @param d: error data @type d: any """ try: s = '%s on %s, job %r\n' % (msg, os.uname()[1], d) s += '\nErrorTrace:\n' + T.lastErrorTrace() + '\n' self.errorLog.add( s ) try: print msg except: pass except Exception, why: f = open('ErrorReportError_XRefineSlave','a') f.write( str(why) ) try: f.write( T.lastErrorTrace() ) except: pass f.close()
def initialize(self, params):
    """
    Copy all master-supplied parameters onto this slave instance.

    expects::
      {'nice':int, 'ferror':str, .. }

    @param params: initialisation parameters passed from the master
    @type  params: dict
    """
    ## expose every parameter as an instance attribute
    vars(self).update(params)

    ## open the shared error log in append mode
    self.errorLog = LogFile(self.ferror, mode='a')
def initialize(self, params):
    """
    Initialize AlignerSlave.

    @param params: dictionary with init parameters
    @type  params: {param:value}
    """
    ## make each entry an attribute AND keep the raw dict around
    vars(self).update(params)
    self.params = params

    ## error log is opened in append mode so runs accumulate
    self.errorLog = LogFile(self.ferror, mode='a')
class ModelSlave(JobSlave): def initialize(self, params): """ Initialize AlignerSlave. @param params: dictionary with init parameters @type params: {param:value} """ self.__dict__.update(params) self.params = params self.errorLog = LogFile(self.ferror, mode='a') def reportError(self, msg, d): """ Report error. @param msg: error message @type msg: str @param d: error data @type d: any """ try: s = '%s on %s, job %r\n' % (msg, os.uname()[1], d) s += '\nErrorTrace:\n' + T.lastErrorTrace() + '\n' self.errorLog.add(s) try: print msg except: pass except Exception, why: f = open('ErrorReportError_XRefineSlave', 'a') f.write(str(why)) try: f.write(T.lastErrorTrace()) except: pass f.close()
class AmberEntropySlave( JobSlave ): """ Collect AmberEntropist jobs from AmberEntropyMaster and return result. """ def initialize(self, params): """ expects:: {'nice':int, 'ferror':str, .. } @param params: initialisation parameters passed from the master @type params: dict """ self.__dict__.update( params ) self.errorLog = LogFile( self.ferror, mode='a' ) def reportError(self, msg, id ): try: try: print msg except: pass msg = 'trouble with ' + msg s = '%s on %s, run %s\n' % (msg, os.uname()[1], id) s += '\Error:' + T.lastError() s += '\nErrorTrace:\n' + T.lastErrorTrace() + '\n' s += '\n' self.errorLog.add( s ) except Exception, why: f = open('ErrorReportError_AmberEntropySlave','a') f.write( str(type(why)) ) try: f.write( T.lastErrorTrace() ) except: pass f.close()
def initialize(self, params):
    """
    Initialize AlignerSlave.

    @param params: dictionary with init parameters
    @type  params: {param:value}
    """
    ## expose parameters as attributes and keep the raw dict
    vars(self).update(params)
    self.params = params

    ## Only the PATH must be updated from the master to run properly
    os.environ["PATH"] = params['os.environ']["PATH"]

    ## error log is opened in append mode so runs accumulate
    self.errorLog = LogFile(self.ferror, mode='a')
# MAIN ########################### if len(sys.argv) < 3: _use() options = tools.cmdDict(options) #options = testOptions() outFolder = tools.absfile(options['o']) f_target = tools.absfile(options['q']) f_target = f_target or outFolder + SequenceSearcher.F_FASTA_TARGET log = None if 'log' in options: log = LogFile(outFolder + '/modelling.log') ## databases used seq_db = 'swissprot' tmp_db = 'pdbaa' ############### ## SequenceSearcher ## ## Find homologues to the target sequence using blast against "seq_db" ## Cluster the seuences and write the result to nr.fasta ## input: target.fasta ## ## output: sequences/all.fasta ## /blast.out
def defaultOptions(): return {'o': '.', 'log': None, 'h': None} ### MAIN ### options = tools.cmdDict(defaultOptions()) outFolder = tools.absfile(options['o']) host = options['h'] sap = not 'nosap' in options log = None if options['log']: log = LogFile(outFolder + '/' + options['log'], 'a') if not (os.path.exists(outFolder + '/templates')): print 'Current directory is not a valid modeling folder (missing /templates).' _use(defaultOptions()) if '?' in options or 'help' in options: _use(defaultOptions()) ################### ## Aligner ## ## Create a sequence-structure alignment using T-coffee. ## Convert the alignment into Modeller compatible format ## input: sequences/nr.fasta
## NOTE(review): dangling 'else' -- its matching 'if' lies before this
## chunk and is not visible here
else:
    options[k] = 0

## convert list-valued options into integer lists
for k in ['chains', 'ex1', 'ex2', 'ex']:
    if k in options:
        options[k] = t.toIntList(options[k])

if 'atoms' in options:
    options['atoms'] = t.toList(options['atoms'])

## combine the two separate exclusion lists into one tuple if both given,
## otherwise fall back on 'ex', then 'ex1', then None
if 'ex1' in options and 'ex2' in options:
    options['ex'] = (options['ex1'], options['ex2'])
else:
    options['ex'] = options.get('ex', options.get('ex1', None))

if 'log' in options:
    options['log'] = LogFile(options['log'])

## the input file is passed positionally; remove it from the kwargs
f_in = options['i']
del options['i']

a = AmberEntropist(f_in, **options)
a.run()

## pickle the full result dictionary for later python processing
t.dump(a.result, options['o'])

print "Dumped detailed result to %s. (for python unpickling)" % options['o']
print "Entropy in cal/mol-kelvin (total, vibrational): ",
print a.result['S_total'], ',', a.result['S_vibes']
print