Example #1
    def dumpMembers( self, traj, fname  ):
        """
        Dump ensemble member trajectories
        
        @param traj: Trajectory to dump
        @type  traj: Trajectory
        @param fname: trajectory file name - used to derive names for members
        @type  fname: str
        
        @return: list of trajectory files
        @rtype: [str]
        """
        fname = T.stripSuffix( T.absfile( fname, resolveLinks=0 ) )
        members = range( traj.n_members )

        r = []
        for n in members:
            f = fname + '_member_%02i.traj' % n
            if os.path.exists( f ):
                self.log.add('using existing ' + f )
            else:
                self.log.write('saving ' + f + '...')
                m = traj.takeMember( n )
                T.dump( m, f )
                self.log.add('done')
            r += [ f ]

        return r
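Each file written above is an ordinary Biskit pickle, so a single ensemble member can be restored on its own. A minimal round-trip sketch; the file name below just follows the '_member_%02i.traj' pattern used above and is otherwise a made-up placeholder:

import Biskit.tools as T

## reload one ensemble member written by dumpMembers()
m0 = T.load( 'traj_member_00.traj' )
print m0.lenFrames()   ## each member is itself a Trajectory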
Example #2
    def test_Analyzer( self):
        """Dock.Analyzer test """
        from Biskit import Trajectory
        from Biskit.Dock import ComplexList

        ## create a minimal 1-frame receptor trajectory from a pdb file
        self.t_rec = Trajectory( [t.testRoot()+'/rec/1A2P.pdb'],
                                 verbose=self.local)
        t.dump( self.t_rec, self.f_out )

        ## load a complex list
        cl = t.load( t.testRoot() + '/dock/hex/complexes.cl')

        self.a= Analyzer( rec = self.f_out,
                          lig = t.testRoot()+'/lig_pcr_00/traj.dat',
                          ref = t.testRoot()+'/com/ref.complex',
                          verbose = self.local)

        ## shuffle this list five times
        shuff_lst = self.a.shuffledLists( 5, range(8) )

        ## create two random contact matrices
        rand_mat = self.a.random_contacts( cl[0].atomContacts(), 2 )
        
        self.assertEqual( N0.shape(rand_mat[1]), (1075, 876) ) 
Example #3
 def saveProtocols( self ):
     """
     Save protocol to file.
     """
     f_prot = T.stripSuffix( T.absfile(self.fout) ) + '_protocols.dat'
     self.log.write( 'Saving parameters to %s...' % f_prot )
     T.dump( self.protocols, f_prot )
Example #4
 def done(self):
     """
     Write result to file.
     """
     tree = self.getResult()
     self.log.add("Saving result to %s..." % self.fout)
     T.dump( tree, self.fout )
     self.log.add( "Done" )
Example #5
    def saveRst( self, fname ):
        """
        Pickle data necessary for a restart of the running calculation.

        @param fname: file name
        @type  fname: str
        """
        T.dump( self.getRst(), fname )
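Only the save side of the restart cycle appears in these examples. A sketch of the matching load side, under the assumption that the calculation object offers a setRst() counterpart to getRst() (that name is hypothetical):

import Biskit.tools as T

rst = T.load( 'calc_restart.dat' )   ## hypothetical restart file name
calc.setRst( rst )                   ## hypothetical counterpart to getRst()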
Example #6
File: dope.py Project: graik/biskit
def updateModelDic( f ):
    """ Call update() on all models in a dict to make them aware of the
    new profiles."""

    print 'Updating ', f

    d = T.load( T.absfile( f ) )

    for m in d.values():
        m.update( updateMissing=1 )

    T.dump( d, T.absfile( f ) )
Example #7
    def write_PDBModels(self, pdb_list, output_file = None):
        """
        Pickles the list of PDBModels to disc.

        @param pdb_list: list of models
        @type  pdb_list: ModelList
        @param output_file: output file
                       (default: None S{->} outFolder/L{F_PDBModels_OUT})
        @type  output_file: str         
        """
        output_file = output_file or self.outFolder + self.F_PDBModels_OUT
        T.dump(pdb_list, output_file)
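Since the whole ModelList goes into one pickle, it comes back in a single T.load() call. A hedged sketch; the file name is a placeholder for outFolder/F_PDBModels_OUT:

import Biskit.tools as T

pdb_list = T.load( 'modelling_result.list' )   ## placeholder file name
for m in pdb_list:
    print m.lenAtoms()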
Example #8
 def dump(self, o):
     """
     Try to pickle an object to the currently valid path.
     
     @return: the absolute path to which o was pickled
     @rtype: str
     """
     try:
         f = self.local()
         T.dump(o, f)  ## Biskit.tools.dump expects (object, filename)
         return f
     except:
         T.errWriteln("Couldn't dump to %s (constructed from %s)" %\
                      self.formatted(), self.local() )
         raise
Example #9
 def dump( self, o ):
     """
     Try to pickle an object to the currently valid path.
     
     @return: the absolute path to which o was pickled
     @rtype: str
     """
     try:
         f = self.local()
         T.dump( o, f )  ## Biskit.tools.dump expects (object, filename)
         return f
     except:
         T.errWriteln("Couldn't dump to %s (constructed from %s)" %\
                      self.formatted(), self.local() )
         raise
Example #10
def changeModel(inFile, prefix, sourceModel):

    print '\nget ' + os.path.basename(inFile) + '..',

    model = PDBModel(inFile)

    model.update()

    model = model.sort()

    eq = model.equals(sourceModel)
    if not eq[0] and eq[1]:
        raise ConvertError('source and other models are not equal: ' + str(eq))


#    model.validSource()
    model.setSource(sourceModel.validSource())

    #model.atomsChanged = 0
    for k in model.atoms:
        model.atoms[k, 'changed'] = N0.all(model[k] == sourceModel[k])

    model.xyzChanged = (0 != N0.sum(N0.ravel(model.xyz - sourceModel.xyz)))

    model.update(updateMissing=1)

    if model.xyzChanged:

        doper = PDBDope(model)

        if 'MS' in sourceModel.atoms.keys():
            doper.addSurfaceRacer(probe=1.4)

        if 'density' in sourceModel.atoms.keys():
            doper.addDensity()

        if 'foldX' in sourceModel.info.keys():
            doper.addFoldX()

        if 'delphi' in sourceModel.info.keys():
            doper.addDelphi()

    outFile = os.path.dirname( inFile ) + '/' + prefix +\
            T.stripFilename( inFile ) + '.model'

    T.dump(model, outFile)

    print '-> ' + os.path.basename(outFile)
Example #11
    def pickleError(self, o):
        """
        Pickle object to disc.

        @param o: object to pickle
        @type  o: any
        """
        try:
            fname = self.ferror + '_dat'
            if not os.path.exists(fname):
                T.dump(o, fname)
        except Exception, why:
            f = open('ErrorReportError_ContactSlave', 'a')
            f.write('Could not pickle error info\n')
            f.write(str(why))
            f.close()
Example #12
    def pickleError( self, o ):
        """
        Pickle object to disc.

        @param o: object to pickle
        @type  o: any
        """
        try:
            fname = self.ferror + '_dat'
            if not os.path.exists( fname ):
                T.dump( o, fname )
        except Exception, why:
            f = open('ErrorReportError_ContactSlave','a')
            f.write('Could not pickle error info\n')
            f.write( str( why ) )
            f.close()
Example #13
File: dope.py Project: graik/biskit
def changeModel( inFile, prefix, sourceModel ):

    print '\nget ' + os.path.basename( inFile ) + '..',

    model = PDBModel( inFile )

    model.update()

    model = model.sort()

    eq = model.equals( sourceModel )
    if not eq[0] and eq[1]:
        raise ConvertError('source and other models are not equal: ' + str(eq))

#    model.validSource()
    model.setSource( sourceModel.validSource() )

    #model.atomsChanged = 0
    for k in model.atoms:
        model.atoms[k,'changed'] = N0.all( model[k] == sourceModel[k] )

    model.xyzChanged = ( 0 != N0.sum( N0.ravel( model.xyz - sourceModel.xyz)) )

    model.update( updateMissing=1 )

    if model.xyzChanged:

        doper = PDBDope( model )

        if 'MS' in sourceModel.atoms.keys():
            doper.addSurfaceRacer( probe=1.4 )

        if 'density' in sourceModel.atoms.keys():
            doper.addDensity()

##        if 'foldX' in sourceModel.info.keys():
##            doper.addFoldX()
            
        if 'delphi' in sourceModel.info.keys():
            doper.addDelphi()

    outFile = os.path.dirname( inFile ) + '/' + prefix +\
            T.stripFilename( inFile ) + '.model' 

    T.dump( model, outFile )

    print '-> ' + os.path.basename( outFile )
Example #14
    def dumpMissing( self, o, fname ):
        """
        Pickle *o* to path *fname*, if it is not already there.

        @param o: object to dump
        @type  o: any
        @param fname: file name
        @type  fname: str
        
        @return: file name
        @rtype: str
        """
        if os.path.exists( fname ):
            self.log.add('using existing ' + fname )
        else:
            self.log.add('Saving ' + fname )
            T.dump( o, fname )

        return fname
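Because dumpMissing() checks os.path.exists() before writing, repeated calls with the same file name are cheap no-ops. The same pattern condensed without the logger, as a sketch:

import os
import Biskit.tools as T

def dump_missing( o, fname ):
    ## pickle o only if fname does not exist yet; always return the path
    if not os.path.exists( fname ):
        T.dump( o, fname )
    return fname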
Example #15
def repair( f, old, new ):
    """
    Load and repickle with corrected path
    """
    try:
        o = load( f )
        print 'working on %s...' % f
        if isinstance( o, PDBModel)  : replace_in_PDBModel( o, old, new )
        if isinstance( o, Complex )  : replace_in_Complex(  o, old, new )
        if isinstance( o, Trajectory): replace_in_Traj( o, old, new )
        if isinstance( o, dict )     : replace_in_dict( o, old, new )
        if isinstance( o, list )     : replace_in_list( o, old, new )
        if isinstance( o, ComplexList): o = replace_in_ComplexList(o,old,new)

        dump( o, f )
        print
        
    except InvalidTargetPath, e:
        print e
Example #16
    def dumpMissing(self, o, fname):
        """
        Pickle *o* to path *fname*, if it is not already there.

        @param o: object to dump
        @type  o: any
        @param fname: file name
        @type  fname: str
        
        @return: file name
        @rtype: str
        """
        if os.path.exists(fname):
            self.log.add('using existing ' + fname)
        else:
            self.log.add('Saving ' + fname)
            T.dump(o, fname)

        return fname
Example #17
def compile_standard_dict( fpdb, fout, fmask=None, skip_first=True,
                           take_chain=True):
    """
    @param fpdb: str, pdb file containing standard residues
    @param fout: str, file name for pickled dictionary
    @param fmask: func, method to mask standard residues
    """
    m = B.PDBModel( fpdb )
    if take_chain:
        m = m.takeChains( [0] )
    if skip_first:
        m.removeRes( 0 )
    if fmask is not None:
        m = m.compress( fmask(m) )

    d = extract_unique_residues( m )
    for name in d:
        d[name] = normalize_residue( d[name] )
    T.dump( d, fout )
    return d
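A hedged usage sketch for the function above; the pdb file, output name, and mask are made-up placeholders, and maskProtein() is assumed to be available on the model:

## compile and pickle a residue dictionary from a reference structure
d = compile_standard_dict( 'standard_residues.pdb', 'residues.dic',
                           fmask=lambda m: m.maskProtein() )
print sorted( d.keys() )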
Example #18
def compile_standard_dict(fpdb,
                          fout,
                          fmask=None,
                          skip_first=True,
                          take_chain=True):
    """
    @param fpdb: str, pdb file containing standard residues
    @param fout: str, file name for pickled dictionary
    @param fmask: func, method to mask standard residues
    """
    m = B.PDBModel(fpdb)
    if take_chain:
        m = m.takeChains([0])
    if skip_first:
        m.removeRes(0)
    if fmask is not None:
        m = m.compress(fmask(m))

    d = extract_unique_residues(m)
    for name in d:
        d[name] = normalize_residue(d[name])
    T.dump(d, fout)
    return d
Example #19
    def __dumpFrames(self, traj, outFolder, prefix ):
        """
        @param traj: Trajectory
        @type  traj: Trajectory
        @param outFolder: folder for pickled arrays
        @type  outFolder: str
        @param prefix: file name prefix
        @type  prefix: str
        @return: { (int,int) : str } OR None, if traj is None
        @rtype: {(int,int) : str}
        """
        if traj is None:
            return None

        if self.verbose: self.log.write('dumping frame chunks...')

        n_frames = self.__windowSize( 20, len( self.hosts ), len( traj ) )

        i_windows = self.__getFrameWindows( traj, n_frames )

        r = {}

        for i in range( len(i_windows) ):

            w = i_windows[i]

            a = traj.frames[ w[0]:w[1] ]
            f = outFolder + '/%s_%i_to_%i.dat' % ((prefix,) + w)
            T.dump( a, f )
            r[w] = f

            if self.verbose and i % (len(i_windows)/50 + 1) == 0:
                self.log.write('#')

        if self.verbose: self.log.add('done')

        return r
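The consumer side reassembles the chunks by loading each pickled array and concatenating them in window order. A sketch, where r is the {(start,stop): filename} map returned above and the N0 import path is an assumption about the numeric wrapper used throughout this document:

import Biskit.tools as T
import Biskit.oldnumeric as N0   ## assumed import path for N0

windows = sorted( r.keys() )
frames = N0.concatenate( [ T.load( r[w] ) for w in windows ] )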
Example #20
    def __dumpFrames(self, traj, outFolder, prefix):
        """
        @param traj: Trajectory
        @type  traj: Trajectory
        @param outFolder: folder for pickled arrays
        @type  outFolder: str
        @param prefix: file name prefix
        @type  prefix: str
        @return: { (int,int) : str } OR None, if traj is None
        @rtype: {(int,int) : str}
        """
        if traj is None:
            return None

        if self.verbose: self.log.write('dumping frame chunks...')

        n_frames = self.__windowSize(20, len(self.hosts), len(traj))

        i_windows = self.__getFrameWindows(traj, n_frames)

        r = {}

        for i in range(len(i_windows)):

            w = i_windows[i]

            a = traj.frames[w[0]:w[1]]
            f = outFolder + '/%s_%i_to_%i.dat' % ((prefix, ) + w)
            T.dump(a, f)
            r[w] = f

            if self.verbose and i % (len(i_windows) / 50 + 1) == 0:
                self.log.write('#')

        if self.verbose: self.log.add('done')

        return r
Example #21
options = tools.cmdDict( {'o':'traj.dat'} )

## get all PDBs and models directly from a directory (avoids shell limits)
if 'd' in options:
    d = tools.absfile( options['d'] )
    l = os.listdir( d )
    l = [ x for x in l if x[-4:].upper() == '.PDB' or x[-6:] == '.MODEL' \
          or x[-7:].upper() == '.PDB.GZ' ]
    l = [ os.path.join( d, f ) for f in l ]

    options['i'] = l

if 'e' in options:
    t_class = EnsembleTraj
else:
    t_class = Trajectory

traj = t_class( options['i'],
                options.get('r', None),
                rmwat   =options.has_key('wat'),
                castAll =options.has_key('c') )

## remove dependencies to source
traj.ref.disconnect()

if options.has_key('f'):
    traj.fit()

tools.dump( traj, options['o'] )
Example #22
import Biskit.tools as T

t = T.load( 'com_fake.etraj' )

x = t.takeFrames( range(0, t.n_members * 5) )

x.ref.disconnect()

T.dump( x, 'extract.etraj' )
Example #23
    except cPickle.UnpicklingError:

        print "Trying to load %s in sloppy mode..." % f
        return PickleUpgrader(open(T.absfile(f))).load()
    

#########
## Main
#########

__use()

fs = sys.argv[1:]

for f in fs:

    try:
        o = sloppyload( f )

        ## don't slim PDBModels that are their own source
        if isinstance( o, PDBModel ) and str( o.source ) == T.absfile( f ):
            o.forcePickle = 1
            
        T.flushPrint('Dumping %s\n' % f )
        T.dump( o, T.absfile( f ) )

    except:
        print "Error with ", f
        print T.lastError()
Example #24
## MAIN ##

use()

o = T.cmdDict({'n': 10})

f_in = T.absfile(o['i'])
f_out = T.absfile(o.get('o', f_in))
n = int(o['n'])

T.flushPrint("Loading...")
t = T.load(f_in)

T.flushPrint("Converting %i frames..." % len(t))

if isinstance(t, EnsembleTraj):
    T.flushPrint("Nothing to be done!\n")
    sys.exit(0)

t = traj2ensemble(t, n)
if 'pdb' in o:
    t.ref.pdbCode = o['pdb']

if f_in == f_out:
    os.rename(f_in, f_in + '_backup')

T.flushPrint("Saving...")
T.dump(t, f_out)

T.flushPrint("Done.\n")
Example #25
                                           niceness=nice_dic,
                                           outFile=subFile,
                                           com_version=version,
                                           show_output=show_x,
                                           add_hosts=add_hosts)

                    t.flushPrint('Start job processing .. ')
                    master.start()

                    ## wait until master is finished
                    while not master.isFinished():
                        time.sleep(5)

                ## subList contains no info to be updated
                else:
                    t.dump(subLst, subFile)

        print '\nCollecting final results...'
        complex_lst = ComplexList()
        for f in subFile_names:
            sub = t.load(f)
            complex_lst += sub
            os.unlink(f)

        t.dump(complex_lst, options['o'])

    else:

        subLst = checkListStatus(complex_lst, update, force, version)

        if subLst:
Example #26
def cluster(tc, options):

    n_cluster = int(options['n'])

    allowedAtoms = T.toList(options.get('a', []))
    if allowedAtoms:
        mask = tc.traj.ref.mask(lambda a: a['name'] in allowedAtoms)
    else:
        mask = selectedAtoms(tc.traj.ref)

    saveIn = T.absfile(options['o']) + '/'
    conv = float(options['conv'])

    tc.cluster(n_cluster, aMask=mask, converged=conv)

    ## collect center frame index for each cluster
    frames = [members[0] for members in tc.memberFrames()]

    result = tc.traj.takeFrames(frames)  ## trajectory of cluster centers

    model_dic = {}

    dic_index = 1

    if options.has_key('ref'):
        ## use user-provided reference structure
        if os.path.isfile(T.absfile(options['ref'])):
            print '\nUsing user specified reference pdb'
            m = PDBModel(options['ref'])
            m.remove(m.maskH2O())

        ## use reference in trajectory
        else:
            print '\nUsing reference in trajectory'
            m = tc.traj.ref

        m = dumpModel(m, options, saveIn + m.getPdbCode() + '_ref.model')
        ## add ref as first model in dictionary
        model_dic[dic_index] = m
        dic_index += 1

    ## save the individual models and add them to the model dictionary
    for i in range(0, result.lenFrames()):
        m = result.getPDBModel(i)

        m = dumpModel(
            m, options,
            saveIn + T.stripFilename(result.frameNames[i]) + '.model')

        model_dic[dic_index] = m
        dic_index += 1

    ## save model dictionary
    fdic = options['dic'] or m.getPdbCode() + '_models.dic'
    T.dump(model_dic, T.absfile(fdic))

    ## REDUNDANT CODE AS MULTIDOCK NOW WRITES THE HEX PDB FILES
    ##
    ##     ## save all models in the dictionary as HEX pdb files
    ##     for k in model_dic.keys():
    ##         m = model_dic[k]

    ##         ## remove hydrogens and sort atoms in standard order
    ##         m.remove( m.maskH() )
    ##         m = molUtils.sortAtomsOfModel(m)
    ##         setChainID( m )

    ##         ## save single hex pdbs
    ##         if options['hex']:
    ##             fhex = options['hex'] + '_%03d' %(k)
    ##         else:
    ##             fhex = m.getPdbCode() + '_%03d_hex.pdb'%(k)

    ##         hexTools.createHexPdb_single( m, T.absfile( fhex ) )

    #    fhex = options['hex'] or m.getPdbCode() + '_hex.pdb'
    #    hexTools.createHexPdb( model_dic, T.absfile( fhex ) )

    return result
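The pickled dictionary maps running integers (starting at 1, with the reference model first if 'ref' was given) to the dumped models. A reload sketch with a made-up file name:

import Biskit.tools as T

model_dic = T.load( '1xyz_models.dic' )   ## placeholder name
for i in sorted( model_dic.keys() ):
    print i, model_dic[i]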
Example #27
############

options = T.cmdDict( {'o':'ref.complex', 'lo':'lig.model', 'ro':'rec.model' } )

if len (sys.argv) < 3:
    _use( options )

## create a reference complex
print "Loading..."
ref_com =  PDBModel( options['c'] )
print "Removing water..."
ref_com.remove( lambda a: a['residue_name'] in ['TIP3','HOH','WAT'] )

## extract rec and lig chains
rec_chains = T.toIntList( options['r'] )
lig_chains = T.toIntList( options['l'] )

print "Extracting rec and lig..."
ref_rec = ref_com.takeChains( rec_chains )
ref_lig = ref_com.takeChains( lig_chains )

## create Protein complex
com = ProteinComplex( ref_rec, ref_lig )

print "Saving..."
ref_lig.saveAs( T.absfile( options['lo'] ) )

ref_rec.saveAs( T.absfile( options['ro'] ) )

T.dump( com, T.absfile( options['o']) )
Example #28
    return result


def test():
    options = defOptions()
    options['i'] = '/home/Bis/raik/data/tb/interfaces/c11/lig_pcr_00/traj.dat'
    options['step'] = '3'
    options['s'] = '0'
    options['o'] = '~johan/dock/scripts'
    options['ref'] = ''

    return options


if __name__ == '__main__':
    if len(sys.argv) < 2:
        _use()

##    options = test()
    options = T.cmdDict( defOptions() )
                       
    tc = load( options )

    r = cluster( tc, options )
##    r=rmsdLimitedClustering( tc, options )
    if options.has_key('co'):
        T.dump( tc, options['co'] )

    report( tc )
        
Example #29
#!/usr/bin/env python

import Biskit.tools as T

## pickle to raw data
t = T.load('traj.dat')

t.ref.writePdb('traj_ref.pdb')
T.dump(t.frameNames, 'traj_framenames.list')  ## for sorting by time and member
t.writeCrd('traj.crd')
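The frame-name list dumped above is a plain Python list of strings, so downstream scripts can reload and reorder it directly. A sketch:

import Biskit.tools as T

names = T.load( 'traj_framenames.list' )
print names[:3]   ## inspect before sorting frames by time and member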
Example #30
    
    rec_dic = T.load( T.absfile( options['rdic'] ) )
    lig_dic = T.load( T.absfile( options['ldic'] ) )

    for f in fs:

        T.flushPrint('Loading %s ...' % f )

        cl = T.load( f )
        
        cl = reduceComplexList( cl )

        result += cl

    T.flushPrint('done\n')

    T.flushPrint('correct model numbers...')
    correct_model_numbers( result, rec_dic, lig_dic )

    T.flushPrint( '\ncasting all rec models...' )
    pairwise_cast( result.models.recModels() )

    T.flushPrint( '\ncasting all lig models...')
    pairwise_cast( result.models.ligModels() )
    T.flushPrint('done\n')

    save_changed_models( result, T.absfile( options['mo']) )

    T.flushPrint('Dumping result to %s' % T.absfile( options['o'] ) )
    T.dump( result, T.absfile( options['o'] ) )
Example #31
    replacements = tools.toList(options['r'])
    repl = [r.split(':') for r in replacements]

for f in files:
    try:
        print "Re-localizing ", f
        f = tools.absfile(f)
        o = tools.load(f)
        result = 0

        if o.__class__ in [PCRModel, PDBModel]:
            result = localizeModel(o, repl, f)

        else:
            if o.__class__ in [Complex]:
                result = localizeComplex(o, repl)

            else:
                print "unknown class", o.__class__

        if result:
            f_bak = f + '__old'
            os.rename(f, f_bak)
            tools.dump(o, f)
            print "..done"
        else:
            print "..skipped"

    except Exception, why:
        print "ERROR converting %s: %s" % (f, str(why))
Example #32
    rec_dic = T.load(T.absfile(options['rdic']))
    lig_dic = T.load(T.absfile(options['ldic']))

    for f in fs:

        T.flushPrint('Loading %s ...' % f)

        cl = T.load(f)

        cl = reduceComplexList(cl)

        result += cl

    T.flushPrint('done\n')

    T.flushPrint('correct model numbers...')
    correct_model_numbers(result, rec_dic, lig_dic)

    T.flushPrint('\ncasting all rec models...')
    pairwise_cast(result.models.recModels())

    T.flushPrint('\ncasting all lig models...')
    pairwise_cast(result.models.ligModels())
    T.flushPrint('done\n')

    save_changed_models(result, T.absfile(options['mo']))

    T.flushPrint('Dumping result to %s' % T.absfile(options['o']))
    T.dump(result, T.absfile(options['o']))
Example #33
#!/usr/bin/env python

import Biskit.tools as T

## pickle to raw data
t = T.load( 'traj.dat' )

t.ref.writePdb('traj_ref.pdb')
T.dump(t.frameNames, 'traj_framenames.list')  ## for sorting by time and member
t.writeCrd('traj.crd')



Example #34
def cluster( tc, options ):
    
    n_cluster = int( options['n'] )

    allowedAtoms = T.toList( options.get('a',[]) )
    if allowedAtoms:
        mask = tc.traj.ref.mask( lambda a: a['name'] in allowedAtoms )
    else:
        mask = selectedAtoms( tc.traj.ref )
    
    saveIn = T.absfile( options['o'] ) + '/'
    conv = float( options['conv'] )
    
    tc.cluster( n_cluster, aMask=mask, converged=conv )

    ## collect center frame index for each cluster
    frames = [ members[0] for members in tc.memberFrames() ]

    result = tc.traj.takeFrames( frames ) ## trajectory of cluster centers

    model_dic = {}

    dic_index = 1

    if options.has_key('ref'):
        ## use user-provided reference structure
        if os.path.isfile( T.absfile(options['ref']) ):
            print '\nUsing user specified reference pdb'
            m = PDBModel( options['ref'] )
            m.remove( m.maskH2O() )
            
        ## use reference in trajectory
        else:
            print '\nUsing reference in trajectory' 
            m = tc.traj.ref

        m = dumpModel( m, options, saveIn+m.getPdbCode()+'_ref.model')
        ## add ref as first model in dictionary   
        model_dic[dic_index] = m
        dic_index += 1

    ## save the individual models and add them to the model dictionary
    for i in range(0, result.lenFrames() ):
        m = result.getPDBModel(i)

        m = dumpModel(m, options, saveIn +
                  T.stripFilename(result.frameNames[i]) +'.model' )

        model_dic[dic_index] = m
        dic_index += 1
        
        
    ## save model dictionary
    fdic = options['dic'] or m.getPdbCode() + '_models.dic'
    T.dump( model_dic, T.absfile( fdic ) )

## REDUNDANT CODE AS MULTIDOCK NOW WRITES THE HEX PDB FILES
##
##     ## save all models in the dictionary as HEX pdb files
##     for k in model_dic.keys():
##         m = model_dic[k]
        
##         ## remove hydrogens and sort atoms in standard order
##         m.remove( m.maskH() )
##         m = molUtils.sortAtomsOfModel(m)
##         setChainID( m )

##         ## save single hex pdbs
##         if options['hex']:
##             fhex = options['hex'] + '_%03d' %(k) 
##         else:
##             fhex = m.getPdbCode() + '_%03d_hex.pdb'%(k)
   
##         hexTools.createHexPdb_single( m, T.absfile( fhex ) )
       
#    fhex = options['hex'] or m.getPdbCode() + '_hex.pdb'
#    hexTools.createHexPdb( model_dic, T.absfile( fhex ) )
    
    return result
Example #35
    else:
        options[k] = 0

for k in ['chains', 'ex1', 'ex2', 'ex']:
    if k in options:
        options[k] = t.toIntList(options[k])

if 'atoms' in options:
    options['atoms'] = t.toList(options['atoms'])

if 'ex1' in options and 'ex2' in options:
    options['ex'] = (options['ex1'], options['ex2'])
else:
    options['ex'] = options.get('ex', options.get('ex1', None))

if 'log' in options:
    options['log'] = LogFile(options['log'])

f_in = options['i']
del options['i']

a = AmberEntropist(f_in, **options)
a.run()

t.dump(a.result, options['o'])

print "Dumped detailed result to %s. (for python unpickling)" % options['o']
print "Entropy in cal/mol-kelvin (total, vibrational): ",
print a.result['S_total'], ',', a.result['S_vibes']
print
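The pickled result is a plain dictionary, so the values printed above can be recovered later without re-running the entropy calculation. A sketch; the file name stands in for whatever was passed as option 'o':

import Biskit.tools as T

r = T.load( 'entropy_result.dat' )   ## placeholder for options['o']
print r['S_total'], ',', r['S_vibes']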
Example #36
    else:
        options[k] = 0

for k in ['chains','ex1', 'ex2', 'ex']:
    if k in options:
        options[k] = t.toIntList( options[k] )

if 'atoms' in options:
    options['atoms'] = t.toList( options['atoms'] )

if 'ex1' in options and 'ex2' in options:
    options['ex'] = ( options['ex1'], options['ex2'] )
else:
    options['ex'] = options.get( 'ex', options.get('ex1', None) )

if 'log' in options:
    options['log'] = LogFile( options['log'] )

f_in = options['i']
del options['i']

a = AmberEntropist( f_in, **options )
a.run()

t.dump( a.result, options['o'] )

print "Dumped detailed result to %s. (for python unpickling)" % options['o']
print "Entropy in cal/mol-kelvin (total, vibrational): ",
print a.result['S_total'], ',', a.result['S_vibes']
print
Example #37
    return result


def test():
    options = defOptions()
    options['i'] = '/home/Bis/raik/data/tb/interfaces/c11/lig_pcr_00/traj.dat'
    options['step'] = '3'
    options['s'] = '0'
    options['o'] = '~johan/dock/scripts'
    options['ref'] = ''

    return options


if __name__ == '__main__':
    if len(sys.argv) < 2:
        _use()

##    options = test()
    options = T.cmdDict(defOptions())

    tc = load(options)

    r = cluster(tc, options)
    ##    r=rmsdLimitedClustering( tc, options )
    if options.has_key('co'):
        T.dump(tc, options['co'])

    report(tc)
Example #38
options = tools.cmdDict({'o': 'traj.dat'})

## get all PDBs and models directly from a directory (avoids shell limits)
if 'd' in options:
    d = tools.absfile(options['d'])
    l = os.listdir(d)
    l = [ x for x in l if x[-4:].upper() == '.PDB' or x[-6:] == '.MODEL' \
          or x[-7:].upper() == '.PDB.GZ' ]
    l = [os.path.join(d, f) for f in l]

    options['i'] = l

if 'e' in options:
    t_class = EnsembleTraj
else:
    t_class = Trajectory

traj = t_class(options['i'],
               options.get('r', None),
               rmwat=options.has_key('wat'),
               castAll=options.has_key('c'))

## remove dependencies to source
traj.ref.disconnect()

if options.has_key('f'):
    traj.fit()

tools.dump(traj, options['o'])
Example #39
import Biskit.tools as T
import Biskit as B
import numpy as N

com = T.load( 'com.traj' )

# re-order frames into 4 parallel trajectories
frames = N.zeros( len(com), int )

for i in range( 11 ):
    N.put( frames, range(i*4,i*4+4), N.arange(i,44,11) )

etraj = B.EnsembleTraj( n_members=4 )
etraj.frames = com.takeFrames( frames ).frames
etraj.ref = com.ref
etraj.resetFrameNames()
etraj.ref.disconnect()

# separate protein and DNA into two chains
etraj.ref.chainIndex(breaks=True, force=True, cache=True)
etraj.ref.addChainId()

## extract only some residues for speed
t1 = etraj.takeAtoms( etraj.ref.res2atomIndices(range(10)) )
t2 = etraj.takeChains( [1] )

etraj = t1.concatAtoms( t2 )

T.dump( etraj, 'com_fake.etraj' )
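A quick sanity check of the pickle written above; the member and frame counts follow from n_members=4 and the 11 x 4 re-ordering loop, assuming the source trajectory holds 44 frames:

import Biskit.tools as T

t = T.load( 'com_fake.etraj' )
print t.n_members   ## 4 parallel member trajectories
print len( t )      ## 44 frames in total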
Example #40
    for xyz in t.frames:
        result_xyz.append( xyz.astype('f') )

    for fname in t.frameNames:
        result_frameNames.append( fname )
    
    T.flushPrint('#')
    
print " Done"

result = Trajectory()

result.ref = result_ref
result.ref.disconnect()

if 'pdb' in o:
    result.ref.pdbCode = o['pdb']

result.frames      = N0.array( result_xyz, 'f' )
result.frameNames  = result_frameNames

del result_xyz
## too much memory required for this
## result = trajLst[0].concat( *trajLst[1:] )

T.flushPrint("Converting to EnsembleTraj...")
result = traj2ensemble( result, len(inLst))

T.flushPrint( "Done\nDumping ensemble traj to " + o['o'] )
T.dump( result, T.absfile( o['o'] ) )
Example #41
## MAIN ##

use()

o = T.cmdDict( {'n':10} )

f_in  = T.absfile( o['i'] )
f_out = T.absfile( o.get('o', f_in) )
n = int( o['n'] )

T.flushPrint("Loading...")
t = T.load( f_in )

T.flushPrint("Converting %i frames..." % len(t) )

if isinstance(t, EnsembleTraj ):
    T.flushPrint( "Nothing to be done!\n")
    sys.exit(0)
    
t = traj2ensemble( t, n )
if 'pdb' in o:
    t.ref.pdbCode = o['pdb']

if f_in == f_out:
    os.rename( f_in, f_in + '_backup')

T.flushPrint("Saving...")
T.dump( t, f_out )

T.flushPrint("Done.\n")
Example #42
import Biskit.tools as T
import Biskit as B
import numpy as N

com = T.load('com.traj')

# re-order frames into 4 parallel trajectories
frames = N.zeros(len(com), int)

for i in range(11):
    N.put(frames, range(i * 4, i * 4 + 4), N.arange(i, 44, 11))

etraj = B.EnsembleTraj(n_members=4)
etraj.frames = com.takeFrames(frames).frames
etraj.ref = com.ref
etraj.resetFrameNames()
etraj.ref.disconnect()

# separate protein and DNA into two chains
etraj.ref.chainIndex(breaks=True, force=True, cache=True)
etraj.ref.addChainId()

## extract only some residues for speed
t1 = etraj.takeAtoms(etraj.ref.res2atomIndices(range(10)))
t2 = etraj.takeChains([1])

etraj = t1.concatAtoms(t2)

T.dump(etraj, 'com_fake.etraj')
Example #43
    for key in ['indi', 'exdi', 'salt', 'ionrad', 'prbrad', 
                 'bndcon', 'scale', 'perfil']:
        if key in options: options[key] = float( options[key] )

    if 'log' in options:
        options['log'] = LogFile( options['log'] )

    ## create a complex
    com = inputComplex( options )

    dg = DelphiBindingEnergy( com, **options )
    r = dg.run()
    
    print "Saving result complex to ", f_ocom
    T.dump( dg.delphicom, f_ocom )
    
    print "Final Result"
    print "============"
    print report( dg.delphicom )
    
    f = open( f_out, 'w' )
    f.write( report( dg.delphicom ) )
    f.close()
    print "energy values written to ", f_out
    
except KeyError, why:
    print 'Insufficient options. Missing: ', (str(why))
    _use( options )

except Exception, why:
Example #44
    for xyz in t.frames:
        result_xyz.append(xyz.astype('f'))

    for fname in t.frameNames:
        result_frameNames.append(fname)

    T.flushPrint('#')

print " Done"

result = Trajectory()

result.ref = result_ref
result.ref.disconnect()

if 'pdb' in o:
    result.ref.pdbCode = o['pdb']

result.frames = N.array(result_xyz, 'f')
result.frameNames = result_frameNames

del result_xyz
## too much memory required for this
## result = trajLst[0].concat( *trajLst[1:] )

T.flushPrint("Converting to EnsembleTraj...")
result = traj2ensemble(result, len(inLst))

T.flushPrint("Done\nDumping ensemble traj to " + o['o'])
T.dump(result, T.absfile(o['o']))
Example #45
            'indi', 'exdi', 'salt', 'ionrad', 'prbrad', 'bndcon', 'scale',
            'perfil'
    ]:
        if key in options: options[key] = float(options[key])

    if 'log' in options:
        options['log'] = LogFile(options['log'])

    ## create a complex
    com = inputComplex(options)

    dg = DelphiBindingEnergy(com, **options)
    r = dg.run()

    print "Saving result complex to ", f_ocom
    T.dump(dg.delphicom, f_ocom)

    print "Final Result"
    print "============"
    print report(dg.delphicom)

    f = open(f_out, 'w')
    f.write(report(dg.delphicom))
    f.close()
    print "energy values written to ", f_out

except KeyError, why:
    print 'Insufficient options. Missing: ', (str(why))
    _use(options)

except Exception, why:
Example #46
if len (sys.argv) < 2:
    _use( default )

options = T.cmdDict( default )

## where to run the calculation
base_folder = T.absfile( options['r'] )+'/'

## template gly-xxx-gly
template = os.path.abspath( options['i'] )

## label for the result dictionary files
label = '_' + options['l']

## mask used to delete glycines
mask = [ int(i) for i in T.toList( options['mask'] )]

## create random peptides from template
#randomPeptides( template, base_folder )

## collect average surfaces
MS, AS, MS_sd, AS_sd = randomSurfaces( base_folder, label, mask )

## save dictionary with all 20 amino acids
T.dump( MS, base_folder + 'MS%s.dic'%label)
T.dump( AS, base_folder + 'AS%s.dic'%label )
T.dump( MS_sd, base_folder + 'MS_sd%s.dic'%label )
T.dump( AS_sd, base_folder + 'AS_sd%s.dic'%label )

Example #47
for f in files:
    try:
        print "Re-localizing ", f
        f = tools.absfile( f )
        o = tools.load( f )
        result = 0

        if o.__class__ in [ PCRModel, PDBModel ]:
            result = localizeModel( o, repl, f )

        else:
            if o.__class__ in [ Complex ]:
                result = localizeComplex( o, repl )

            else:
                print "unknown class", o.__class__

        if result:
            f_bak = f + '__old'
            os.rename( f, f_bak )
            tools.dump( o, f )
            print "..done"
        else:
            print "..skipped"


    except Exception, why:
        print "ERROR converting %s: %s" % (f, str(why))
            
Example #48
                                           niceness = nice_dic,
                                           outFile = subFile,
                                           com_version = version,
                                           show_output = show_x,
                                           add_hosts=add_hosts)

                    t.flushPrint('Start job processing .. ')
                    master.start()

                    ## wait until master is finished
                    while not master.isFinished():
                        time.sleep( 5 )

                ## subList contains no info to be updated   
                else:
                    t.dump( subLst, subFile )

        print '\nCollecting final results...'
        complex_lst = ComplexList()
        for f in subFile_names:
            sub = t.load( f )
            complex_lst += sub
            os.unlink( f )

        t.dump( complex_lst, options['o'] )

    else:

        subLst = checkListStatus(complex_lst, update, force, version )

        if subLst:
Example #49
import Biskit.tools as T

t = T.load('com_fake.etraj')

x = t.takeFrames(range(0, t.n_members * 5))

x.ref.disconnect()

T.dump(x, 'extract.etraj')