def __init__( self, ref_model, models, **kw ):
    """
    @param ref_model: reference
    @type  ref_model: PDBModel
    @param models: structures to be compared with reference
    @type  models: [PDBModel]

    @param kw: additional key=value parameters for Executor:
    @type  kw: key=value pairs ::
                 debug    - 0|1, keep all temporary files (default: 0)
                 verbose  - 0|1, print progress messages to log
                            (log != STDOUT)
                 node     - str, host for calculation (None->local)
                            NOT TESTED (default: None)
                 nice     - int, nice level (default: 0)
                 log      - Biskit.LogFile, program log (None->STDOUT)
                            (default: None)
    """
    Executor.__init__( self, 'icmbrowser', template=self.inp_head, **kw )

    self.f_ref = tempfile.mktemp('_icmcad_ref.pdb')
    self.f_pdb = tempfile.mktemp('_icmcad_%i.pdb')

    self.ref_model = ref_model

    self.models = models
    if not isinstance( self.models, list ):
        self.models = [ self.models ]
def __init__( self, model, **kw ):
    """
    @param model: model to analyze
    @type  model: PDBModel

    @param kw: additional key=value parameters for Executor:
    @type  kw: key=value pairs ::
                 debug    - 0|1, keep all temporary files (default: 0)
                 verbose  - 0|1, print progress messages to log
                            (log != STDOUT)
                 node     - str, host for calculation (None->local)
                            NOT TESTED (default: None)
                 nice     - int, nice level (default: 0)
                 log      - Biskit.LogFile, program log (None->STDOUT)
                            (default: None)
    """
    self.model = model
    ## self.model = model.clone( deepcopy=1 )

    ## temporary pdb-file
    self.f_pdb = tempfile.mktemp('_dssp.pdb')
    self.f_out = tempfile.mktemp('_dssp.out')

    Executor.__init__( self, 'dsspcmbi',
                       args='-i %s' % self.f_pdb,
                       catch_err=1, **kw )
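## --- Usage sketch (added illustration, not part of the original module) ---
## Drives the Dssp wrapper above through the standard Executor life cycle:
## run() calls prepare(), executes 'dsspcmbi', then finish() and cleanup().
## Assumptions: Biskit is installed, the dsspcmbi binary is configured,
## '1R7G.pdb' is a placeholder file name, and the Dssp import path below is
## assumed from the usual Biskit layout.
from Biskit import PDBModel
from Biskit.DSSP import Dssp        ## import path assumed

model = PDBModel( '1R7G.pdb' )      ## placeholder input structure
d = Dssp( model, verbose=0 )
result = d.run()                    ## model with DSSP data attached (see the finish() snippet below)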
def __init__( self, full=0, mode='w', verbose=1, **kw ):
    """
    @param mode: open file with this mode, w=override, a=append
    @type  mode: str
    @param full: display pymol structures in full screen mode::
                   0 - normal mode
                   1 - full screen mode
                   2 - full screen and no menus
    @type  full: 0|1|2
    """
    self.verbose = verbose

    ## name of .pml file
    self.foutName = tempfile.mktemp() + '.pml'

    ## open for <appending|writing|reading>
    self.fgenerate = open( self.foutName, mode )

    ## will contain PymolModels or lists of PymolModels
    self.dic = {}

    ## add startup commands
    self.initPymol()

    ## set arguments for display options (normal, full, all)
    arg = '-q %s' % self.foutName
    if full == 1:
        arg = '-qe %s' % self.foutName
    if full == 2:
        arg = '-qei %s' % self.foutName

    Executor.__init__( self, 'pymol', args=arg,
                       catch_err=1, catch_out=1, **kw )
def __init__( self, target, hmmdb=settings.hmm_db, noSearch=None, **kw ):
    """
    @param target: fasta sequence, fasta file, or PDBModel
    @type  target: PDBModel or str (fasta file) or [ str ] (fasta lines)
    @param hmmdb: Pfam hmm database
    @type  hmmdb: str
    @param noSearch: don't perform a search
    @type  noSearch: 1 OR None
    """
    self.hmmdb = hmmdb

    Executor.__init__( self, 'hmmpfam',
                       f_in=tempfile.mktemp('.fasta'),
                       catch_out=1, **kw )

    self.target = target
    self.fastaID = ''

    if noSearch:
        if self.verbose:
            self.log.writeln(
                'Profiles provided - No search will be performed.')
def __init__( self, model, **kw ):
    """
    @param model: reference PDBModel
    @type  model: PDBModel

    @param kw: additional key=value parameters for Executor:
    @type  kw: key=value pairs ::
                 debug    - 0|1, keep all temporary files (default: 0)
                 verbose  - 0|1, print progress messages to log
                            (log != STDOUT)
                 node     - str, host for calculation (None->local)
                            NOT TESTED (default: None)
                 nice     - int, nice level (default: 0)
                 log      - Biskit.LogFile, program log (None->STDOUT)
                            (default: None)
    """
    self.temp_pdb     = tempfile.mktemp('_foldx_.pdb')
    self.temp_command = tempfile.mktemp('_foldx_.command')
    self.temp_option  = tempfile.mktemp('_foldx_.option')
    self.temp_result  = tempfile.mktemp('_foldx_.result')
    self.temp_runlog  = tempfile.mktemp('_foldx_.log')
    self.temp_errlog  = tempfile.mktemp('_foldx_.err')

    Executor.__init__( self, 'fold_X',
                       args='-manual %s %s %s' % (self.temp_pdb,
                                                  self.temp_option,
                                                  self.temp_command),
                       **kw )

    self.model = model.clone()

    ## fold-X-allowed atoms for each residue in standard order
    self.aminoAcidDict = molUtils.aaAtoms
    for k in self.aminoAcidDict:
        if 'HN' not in self.aminoAcidDict[ k ]:
            self.aminoAcidDict[ k ] += ['HN']
def prepare( self ):
    """
    Overrides Executor method.
    """
    Executor.prepare( self )

    self.__prepareFolder()

    ## if setGrid hasn't been called yet, create automatic grid
    if not self.gsize:
        self.setGrid()

    if self.protonate:
        reducer = Reduce( self.model, verbose=self.verbose,
                          autocap=self.autocap,
                          tempdir=self.tempdir, cwd=self.cwd,
                          log=self.log, debug=self.debug )
        if self.verbose:
            self.log.add('adding hydrogen atoms to input structure\n')

        self.delphimodel = reducer.run()
    else:
        self.delphimodel = self.model.clone()

    self.delphimodel.xplor2amber()

    if not os.path.exists( self.f_charges ):
        self.__prepareCharges( self.f_charges )

    self.delphimodel.writePdb( self.f_pdb )
def finish( self ):
    """
    Overrides Executor method
    """
    Executor.finish( self )
    self.result = self.parse_result()

    ## if the probe radius is not 1.4 A the relative surface exposure
    ## cannot be calculated, but allow this check to be a little flexible
    ## in case we are forced to slightly increase the radii to escape
    ## SurfaceRacer round-off errors
    try:
        if round(self.probe, 1) == 1.4 and self.vdw_set == 1:
            self.__relExposure('MS')
            self.__relExposure('AS')
        else:
            EHandler.warning("No relative accessibilities calculated "+\
                             "when using a probe radius other than 1.4 A"+\
                             " or not using the Richards vdw radii set.")
    except KeyError, what:
        EHandler.warning("Missing standard accessibilities for some "+\
                         "atoms. No relative accessibilities calculated.")
        if 'relMS' in self.result:
            del self.result['relMS']
        if 'relAS' in self.result:
            del self.result['relAS']
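## --- Illustration (added, hedged) of the relative exposure used above ---
## __relExposure itself is not part of this collection; the sketch below only
## captures the general idea: a residue's or atom's relative accessibility is
## its absolute area expressed as a percentage of a standard, fully exposed
## reference area. The function name and error handling are assumptions, not
## the actual SurfaceRacer/Biskit implementation.
def relative_exposure( area, standard_area ):
    """Return accessibility in percent of the fully exposed standard value."""
    if not standard_area:
        ## mirrors the KeyError case handled in finish() above
        raise KeyError( 'missing standard accessibility' )
    return 100.0 * area / standard_area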
def cleanup( self ):
    Executor.cleanup( self )

    if not self.debug:
        T.tryRemove( self.prosaPdbFile )
        T.tryRemove( self.f_in )
        T.tryRemove( self.prosaOutput + '.ana' )
def parm2pdb( self, f_parm, f_crd, f_out, aatm=0 ):
    """
    Use ambpdb to build PDB from parm and crd.

    @param f_parm: existing parm file
    @type  f_parm: str
    @param f_crd: existing crd file
    @type  f_crd: str
    @param f_out: target file name for PDB
    @type  f_out: str

    @return: f_out, target file name for PDB
    @rtype: str

    @raise AmberError: if ambpdb fails
    """
    ## cmd = '%s -p %s -aatm < %s > %s' % \
    args = '-p %s %s' % (f_parm, '-aatm' * aatm)

    x = Executor('ambpdb', args, f_in=f_crd, f_out=f_out,
                 log=self.log, verbose=1, catch_err=1)

    output, error, status = x.run()

    if not os.path.exists( f_out ):
        raise AmberError, 'ambpdb failed.'

    return f_out
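## --- Note on the argument construction above (added illustration) ---
## Multiplying the string '-aatm' by the integer flag includes the option
## only when aatm is 1. A standalone demonstration in plain Python 2; the
## file name 'top.parm' is a placeholder:
for aatm in (0, 1):
    args = '-p %s %s' % ('top.parm', '-aatm' * aatm)
    print aatm, '->', args    ## 0 -> '-p top.parm ', 1 -> '-p top.parm -aatm'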
def __init__( self, model, **kw ):
    """
    @param model: model to analyze
    @type  model: PDBModel

    @param kw: additional key=value parameters for Executor:
    @type  kw: key=value pairs ::
                 debug    - 0|1, keep all temporary files (default: 0)
                 verbose  - 0|1, print progress messages to log
                            (log != STDOUT)
                 node     - str, host for calculation (None->local)
                            NOT TESTED (default: None)
                 nice     - int, nice level (default: 0)
                 log      - Biskit.LogFile, program log (None->STDOUT)
                            (default: None)
    """
    self.model = model
    ## self.model = model.clone( deepcopy=1 )

    ## temporary pdb-file
    self.f_pdb = tempfile.mktemp( '_dssp.pdb' )
    self.f_out = tempfile.mktemp( '_dssp.out' )

    Executor.__init__( self, 'dsspcmbi',
                       args='-na %s' % self.f_pdb,
                       catch_err=1, **kw )
def __init__( self, model, refmodel, **kw ):
    """
    @param model: structure to be aligned to reference
    @type  model: PDBModel
    @param refmodel: reference structure
    @type  refmodel: PDBModel

    @param kw: additional key=value parameters for Executor:
    @type  kw: key=value pairs ::
                 debug    - 0|1, keep all temporary files (default: 0)
                 verbose  - 0|1, print progress messages to log
                            (log != STDOUT)
                 node     - str, host for calculation (None->local)
                            NOT TESTED (default: None)
                 nice     - int, nice level (default: 0)
                 log      - Biskit.LogFile, program log (None->STDOUT)
                            (default: None)
    """
    self.f_pdbin  = tempfile.mktemp( '_tmalign_in.pdb' )
    self.f_pdbref = tempfile.mktemp( '_tmalign_ref.pdb' )
    self.f_matrix = tempfile.mktemp( '_tmalign_matrix.out' )

    Executor.__init__( self, 'tmalign',
                       args='%s %s -m %s' % (self.f_pdbin, self.f_pdbref,
                                             self.f_matrix),
                       **kw )

    self.refmodel = refmodel
    self.model = model
def __init__( self, model, **kw ):
    """
    @param model: PDBModel
    @type  model: PDBModel

    @param kw: additional key=value parameters for Executor:
    @type  kw: key=value pairs ::
                 debug    - 0|1, keep all temporary files (default: 0)
                 verbose  - 0|1, print progress messages to log
                            (log != STDOUT)
                 node     - str, host for calculation (None->local)
                            NOT TESTED (default: None)
                 nice     - int, nice level (default: 0)
                 log      - Biskit.LogFile, program log (None->STDOUT)
                            (default: None)
    """
    self.f_xyzrn = tempfile.mktemp('_msms.xyzrn')

    ## output file from MSMS; MSMS will add a .area extension to the file
    self.f_surf = tempfile.mktemp()

    arg = ' -surface ases -if %s -af %s' % ( self.f_xyzrn, self.f_surf )

    Executor.__init__( self, 'msms', args=arg, **kw )

    self.model = model.clone()
def __init__( self, model, **kw ):
    """
    @param model: PDBModel
    @type  model: PDBModel

    @param kw: additional key=value parameters for Executor:
    @type  kw: key=value pairs ::
                 debug    - 0|1, keep all temporary files (default: 0)
                 verbose  - 0|1, print progress messages to log
                            (log != STDOUT)
                 node     - str, host for calculation (None->local)
                            NOT TESTED (default: None)
                 nice     - int, nice level (default: 0)
                 log      - Biskit.LogFile, program log (None->STDOUT)
                            (default: None)
    """
    Executor.__init__( self, 'whatif', template=self.whatif_script,
                       f_out='/dev/null', **kw )

    self.f_pdb = tempfile.mktemp('_whatif.pdb')
    self.f_relativeASA = tempfile.mktemp('_whatif_relative.log')
    self.f_residueASA  = tempfile.mktemp('_whatif_residue.log')

    self.model = model.clone()
def cleanup( self ):
    """
    Tidy up the mess you created.
    """
    Executor.cleanup( self )

    if not self.debug:
        T.tryRemove( self.f_pdb )
def finish( self ):
    """
    Overrides Executor method
    """
    Executor.finish( self )

    self.raw_result = self.parse_file()
    self.parse_lines( self.raw_result )
    self.result = self.model
def __init__( self, model, tempdir=None, args='',
              autocap=False, capN=[], capC=[], **kw ):
    """
    @param model: structure to which hydrogens should be added
    @type  model: PDBModel
    @param tempdir: create dedicated temporary folder (default: None)
                    see Executor
    @type  tempdir: str | 0|1
    @param args: additional command line arguments for reduce (default: '')
                 example: '-OLDpdb'
    @type  args: str

    @param autocap: add capping NME and ACE residues to any (auto-detected)
                    false N- or C-terminal (default: False)
    @type  autocap: bool

    @param capN: cap N-terminal of these chains (indices) with ACE ([])
    @type  capN: [ int ]
    @param capC: cap C-terminal of these chains (indices) with NME ([])
    @type  capC: [ int ]

    @param kw: additional key=value parameters for Executor:
    @type  kw: key=value pairs ::
                 debug    - 0|1, keep all temporary files (default: 0)
                 verbose  - 0|1, print progress messages to log
                            (log != STDOUT)
                 node     - str, host for calculation (None->local)
                            NOT TESTED (default: None)
                 nice     - int, nice level (default: 0)
                 log      - Biskit.LogFile, program log (None->STDOUT)
                            (default: None)
    """
    tempdir = self.newtempfolder( tempdir )

    self.f_pdbin = tempfile.mktemp( '_in.pdb', 'reduce_', dir=tempdir )
    f_out        = tempfile.mktemp( '_out.pdb', 'reduce_', dir=tempdir )

    self.f_db = T.dataRoot() + '/reduce/reduce_wwPDB_het_dict.txt'

    self.autocap = autocap
    self.capN = capN
    self.capC = capC

    lenchains = model.lenChains()

    Executor.__init__( self, 'reduce',
                       args='%s -BUILD -Nterm%i -DB %s %s' %\
                       (args, lenchains, self.f_db, self.f_pdbin),
                       f_out=f_out, catch_err=True,
                       tempdir=tempdir, **kw )

    self.model = model
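## --- Usage sketch (added illustration, not part of the original module) ---
## Protonate a structure with the Reduce wrapper above. Assumptions: Biskit
## and the 'reduce' binary are installed, '1R7G.pdb' is a placeholder file
## name, and the import path below is assumed.
from Biskit import PDBModel
from Biskit.reduce import Reduce    ## import path assumed

m = PDBModel( '1R7G.pdb' )
r = Reduce( m, autocap=True, verbose=0 )
m_h = r.run()                       ## new PDBModel with hydrogens (see the Reduce finish() snippet below)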
def cleanup( self ):
    """
    remove temporary files
    """
    Executor.cleanup( self )

    if not self.debug:
        T.tryRemove( self.inp1 )
        T.tryRemove( self.inp2 )
def cleanup( self ):
    """
    Tidy up the mess you created.
    """
    Executor.cleanup( self )

    if not self.debug:
        T.tryRemove( self.f_pdbin )
        T.tryRemove( self.f_out )
def cleanup( self ):
    """
    Remove temp files.
    """
    Executor.cleanup( self )

    if not self.debug:
        T.tryRemove( self.f_xyzrn )
        T.tryRemove( self.f_surf + '.area' )
def cleanup( self ):
    """
    Tidy up the mess we created. Called after program execution.
    """
    Executor.cleanup( self )

    if not self.debug:
        ## remove all files created by intervor
        for f in glob.glob( self.f_prefix + '*' ):
            T.tryRemove( f )
def prepare( self, cleanUp=1 ):
    """
    Overrides Executor method.
    """
    Executor.prepare( self )

    self.flush()  ## important to avoid empty input file

    ## Write PDBs to disc if needed
    self.writeStructures()
def cleanup( self ):
    """
    Tidy up the mess you created.
    """
    Executor.cleanup( self )

    if not self.debug:
        T.tryRemove( self.f_ref )

        for i in range( len( self.models ) ):
            T.tryRemove( self.f_pdb % i )
def cleanup( self ):
    """
    Tidy up the mess you created.
    """
    Executor.cleanup( self )

    if not self.debug:
        T.tryRemove( self.f_pdb, verbose=self.verbose )
        T.tryRemove( self.f_out_name, verbose=self.verbose )
        T.tryRemove( os.path.join(self.cwd, 'result.txt'),
                     verbose=self.verbose )
        T.tryRemove( self.f_pdb[:-4] + '_residue.txt',
                     verbose=self.verbose )
def __init__( self, hmmFile, fastaFile, fastaID, **kw ):
    """
    @param hmmFile: path to hmm file (profile)
    @type  hmmFile: str
    @param fastaFile: path to fasta search sequence
    @type  fastaFile: str
    @param fastaID: fasta id of search sequence
    @type  fastaID: str
    """
    self.fastaID = fastaID

    Executor.__init__( self, 'hmmalign',
                       args=' -q %s %s' % (hmmFile, fastaFile), **kw )
def __init__( self, hmmdb, **kw ):
    """
    @param hmmdb: Pfam hmm database
    @type  hmmdb: str
    """
    Executor.__init__( self, 'hmmindex', args='%s' % hmmdb, **kw )

    if not os.path.exists( hmmdb + '.ssi' ):
        if self.verbose:
            self.log.writeln(
                'HMMINDEX: Indexing hmm database. This will take a while')

        self.run()
def __init__( self, hmmName, hmmdb, **kw ):
    """
    @param hmmName: hmm profile name
    @type  hmmName: str
    @param hmmdb: Pfam hmm database
    @type  hmmdb: str
    """
    self.hmmName = hmmName

    Executor.__init__( self, 'hmmfetch',
                       args=' %s %s' % (hmmdb, hmmName), **kw )

    self.f_out = tempfile.mktemp('.hmm')
def __init__( self, model, cr=[0], cl=None,
              mode=2, breaks=0, catch_err=1, **kw ):
    """
    Create a new Intervor instance for a given protein-protein complex.

    @param model: Structure of receptor, ligand and water
    @type  model: Biskit.PDBModel
    @param cr: receptor chains (default: [0] = first chain)
    @type  cr: [ int ]
    @param cl: ligand chains (default: None = all remaining protein chains)
    @type  cl: [ int ]
    @param breaks: consider chain breaks (backbone gaps) (default: 0)
    @type  breaks: bool or 1|0
    @param mode: what to calculate (default 2, = all with shelling order)
    @type  mode: int
    @param catch_err: redirect STDERR to temporary file (default 1)
    @type  catch_err: bool or 0|1
    @param **kw: any other keyword=value pair recognized by Executor
    """
    Executor.__init__( self, 'intervor', catch_err=catch_err, **kw )

    assert isinstance( model, B.PDBModel ), 'requires PDBModel instance'
    assert model is not None, 'requires PDBModel instance'

    self.model = model
    self.breaks = breaks
    self.chains_rec = cr
    self.chains_lig = cl or self.__getLigandChains( model, cr )

    self.local_model = None  #: will hold modified copy of model

    self.mode = mode

    ## intervor puts several output files into current working directory
    ## but respect cwd from Executor.__init__ or ExeConfig/exe_intervor.dat
    self.cwd = self.cwd or tempfile.gettempdir()

    #: will be used by intervor for different output files
    self.f_prefix = tempfile.mktemp( dir=self.exe.cwd )
    self.f_pdb = self.f_prefix + '_intervor.pdb'

    self.result = {}
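## --- Usage sketch (added illustration, not part of the original module) ---
## Run the Intervor wrapper above on a two-chain complex, taking chain 0 as
## receptor and chain 1 as ligand. Assumptions: Biskit and the 'intervor'
## binary are installed, '1R7G.pdb' is a placeholder file name, the import
## path is assumed, and the exact content of the result dictionary is not
## specified here.
from Biskit import PDBModel
from Biskit.Dock.Intervor import Intervor    ## import path assumed

com = PDBModel( '1R7G.pdb' )
iv = Intervor( com, cr=[0], cl=[1], mode=2, verbose=0 )
r = iv.run()        ## finish() is expected to fill the result dictionary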
def fail( self ):
    """
    Called if the external program failed. Overrides Executor method.

    In some very rare cases SurfaceRacer round-off errors cause the
    program to terminate. The simplest remedy to this problem is to
    increase the probe radius by a very small amount and rerun the
    calculation.
    """
    self.i_failed += 1

    if self.i_failed < 2:
        self.probe = self.probe + 0.001
        self.run()

    Executor.fail( self )
def finish( self ):
    """
    Overrides Executor method
    """
    Executor.finish( self )
    self.result = PDBModel( self.f_out )

    ## renumber atoms
    self.result['serial_number'] = N.arange( len( self.result ) )

    ## rescue non-atom information
    self.result.pdbCode = self.model.pdbCode
    self.result.info.update( self.model.info )
    self.result.residues.update( self.model.residues )

    self.result.info['reduce'] = 'hydrogens added/replaced by '\
        + self.version()
def cleanup( self ):
    """
    Tidy up the mess you created.
    """
    Executor.cleanup( self )

    if not self.debug:
        T.tryRemove( self.f_pdb )
        T.tryRemove( self.f_relativeASA )
        T.tryRemove( self.f_residueASA )

        T.tryRemove( 'FOR???.DAT', wildcard=1 )
        T.tryRemove( 'pdbout.tex' )
        T.tryRemove( 'pdbout.txt' )
        T.tryRemove( 'TEXSTORE.DAT' )
        T.tryRemove( 'TEXTABLE.DAT' )
def __init__( self, models, **kw ):
    """
    @param models: if more than one model is given they are concatenated
                   and the energy is calculated for the two together.
    @type  models: PDBModels

    @param kw: additional key=value parameters for Executor:
    @type  kw: key=value pairs ::
                 debug    - 0|1, keep all temporary files (default: 0)
                 verbose  - 0|1, print progress messages to log
                            (log != STDOUT)
                 node     - str, host for calculation (None->local)
                            NOT TESTED (default: None)
                 nice     - int, nice level (default: 0)
                 log      - Biskit.LogFile, program log (None->STDOUT)
                            (default: None)
    """
    self.models = models

    ## Potentials to use, pII3.0 is the default setting
    self.pairPot = 'prosa2003.pair-cb'  # default: pII3.0.pair-cb
    self.surfPot = 'prosa2003.surf-cb'  # default: pII3.0.surf-cb

    ## temp files for prosa pdb file and prosa output file
    self.prosaPdbFile = tempfile.mktemp('_prosa2003.pdb')
    self.prosaOutput  = tempfile.mktemp('_prosa2003.out')
    self.temp_dir = T.tempDir()

    prosaInput = tempfile.mktemp('_prosa2003.inp')

    ## set default values
    self.objectName = 'obj1'
    self.lower_k = 1
    self.upper_k = 600
    self.pot_lb = 0.
    self.pot_ub = 15.

    Executor.__init__( self, 'prosa2003', template=self.inp,
                       f_in=prosaInput, **kw )

    ## check the path to the potential files
    self.checkPotentials()
def __init__( self, template, leaprc=None, **kw ):
    """
    @param template: template for leap input file (file or string)
    @type  template: str
    @param leaprc: forcefield code (leaprc file ending, e.g. 'ff99')
                   OR leaprc file name (e.g. 'leaprc.ff99')
                   OR leaprc path within $AMBERHOME
                   OR leaprc path
                   default: take value from exe_tleap.dat
    @type  leaprc: str
    @param f_in: complete leap input file -- existing or not, to be kept
                 (default: create from template and then discard)
    @type  f_in: str
    @param f_out: file for leap log output (default: discard)
    @type  f_out: str
    @param out_parm: parm output file name (default: 'top.parm')
    @type  out_parm: str
    @param out_crd: coordinate output file name (default: '0.crd')
    @type  out_crd: str

    @param kw: additional key=value parameters for Executor:
    @type  kw: key=value pairs ::
                 debug    - 0|1, keep all temporary files (default: 0)
                 verbose  - 0|1, print progress messages to log
                            (log != STDOUT)
                 node     - str, host for calculation (None->local)
                            NOT TESTED (default: None)
                 nice     - int, nice level (default: 0)
                 log      - Biskit.LogFile, program log (None->STDOUT)
                            (default: None)
    """
    ## override some Executor defaults unless they are freshly given
    kw['catchout'] = kw.get('catchout', 0)

    Executor.__init__( self, 'tleap', template=template, **kw )

    self.args = '-f %s' % self.f_in

    self.leaprc = self.findLeaprc( leaprc or self.exe.leaprc )

    ## set some defaults that may or may not have been specified
    self.out_parm = kw.get('out_parm', 'top.parm')
    self.out_crd  = kw.get('out_crd', '0.crd')
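## --- Illustration (added, hedged): a template such a tleap wrapper could use ---
## The commands are standard tleap input (source / loadPdb / saveAmberParm /
## quit). The %(...)s placeholder names are assumptions for this sketch and
## would have to match whatever attributes the Executor template substitution
## actually provides; they are not taken from the original module.
leap_template = """
source %(leaprc)s
m = loadPdb %(in_pdb)s
saveAmberParm m %(out_parm)s %(out_crd)s
quit
"""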
def __init__( self, hmmName, hmmdb=settings.hmm_db, **kw ):
    """
    @param hmmName: hmm profile name
    @type  hmmName: str
    @param hmmdb: Pfam hmm database
    @type  hmmdb: str

    **kw - all Executor parameters, in particular:
    @param f_out: target file name for profile.hmm, prevents its deletion
    @type  f_out: str
    @param debug:
    @param log:
    @param ...
    """
    self.hmmName = hmmName

    Executor.__init__( self, 'hmmfetch',
                       args=' %s %s' % (hmmdb, hmmName), **kw )
def __init__( self, hmmFile, fastaFile, fastaID, **kw ):
    """
    @param hmmFile: path to hmm file (profile)
    @type  hmmFile: str
    @param fastaFile: path to fasta search sequence
    @type  fastaFile: str
    @param fastaID: fasta id of search sequence
    @type  fastaID: str
    """
    self.fastaID = fastaID
    self.hmmFile = hmmFile
    self.fastaFile = fastaFile

    assert T.fileLength( self.hmmFile ) > 10, \
           'input HMM file missing or empty'

    Executor.__init__( self, 'hmmalign',
                       args=' -q %s %s' % (hmmFile, fastaFile), **kw )
def cleanup( self ):
    """
    Remove files created for and by the calculation.
    """
    Executor.cleanup( self )

    if not self.debug:
        T.tryRemove( self.temp_pdb )
        T.tryRemove( self.temp_command )
        T.tryRemove( self.temp_option )
        T.tryRemove( self.temp_result )
        T.tryRemove( self.temp_runlog )
        T.tryRemove( self.temp_errlog )

        ## Fold-X writes a file called "runlog.txt"
        ## to the local directory. Try to remove it.
        T.tryRemove( 'runlog.txt' )

        ## and even though the error log is supposed
        ## to be written to self.temp_errlog, I get an
        ## 'errorfile.txt' in the local directory. Remove.
        T.tryRemove( 'errorfile.txt' )