Example #1
    def finish(self):
        """
        Overrides Executor method
        """
        Executor.finish(self)

        self.result = self.parse_result()

        ## if the probe radius is other than 1.4 A, the relative surface exposure
        ## cannot be calculated, but allow this check to be a little flexible
        ## in case we are forced to slightly increase the radii to escape
        ## round-off SurfaceRacer errors
        try:
            if round(self.probe, 1) == 1.4 and self.vdw_set == 1:
                self.__relExposure('MS')
                self.__relExposure('AS')
            else:
                EHandler.warning("No relative accessabilities calculated "+\
                                 "when using a prob radius other than 1.4 A"+\
                                 " or not using the Richards vdw radii set.")
        except KeyError as what:
            EHandler.warning("Missing standard accessibilities for some "+\
                             "atoms. No relative accesibilities calculated.")
            if 'relMS' in self.result: del self.result['relMS']
            if 'relAS' in self.result: del self.result['relAS']
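
A minimal standalone sketch of the same guard pattern (hypothetical names and values, not the SurfaceRacer wrapper itself): relative exposures are only derived for the standard 1.4 A probe radius, and a partially computed entry is dropped when a reference value is missing.

## standard per-residue accessibilities; the numbers here are made up for illustration
STANDARD_AS = {'ALA': 107.0, 'GLY': 78.0}

def add_relative_exposure(result, residues, probe=1.4):
    """Add result['relAS'] only when the probe radius is the standard 1.4 A."""
    if round(probe, 1) != 1.4:
        print("warning: no relative accessibilities for probe radius != 1.4 A")
        return result
    try:
        result['relAS'] = [area / STANDARD_AS[res]
                           for area, res in zip(result['AS'], residues)]
    except KeyError:
        print("warning: missing standard accessibility; dropping partial result")
        result.pop('relAS', None)
    return result

print(add_relative_exposure({'AS': [53.5, 39.0]}, ['ALA', 'GLY']))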
Example #2
def single2longAA( seq ):
    """
    Convert string of 1-letter AA code into list of 3-letter AA codes.
    
    :param seq: amino acid sequence in 1-letter code
    :type  seq: str
    
    :return: list with the amino acids in 3-letter code
    :rtype: [str]
    """
    ## invert AA dict
    invTab = {}

    for key in aaDicStandard:
        invTab[ aaDicStandard[key] ] = key

    result = []
    for aa in seq:
        try:
            aa = aa.upper()
            result += [ invTab[aa].upper() ]
        except KeyError:
            EHandler.warning("unknown residue: " + str(aa))
            result += ['Xaa']

    return result
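
The same dictionary-inversion trick in isolation, with a toy table standing in for aaDicStandard (the entries below are illustrative, not Biskit's actual dictionary):

aa_3to1 = {'ala': 'A', 'cys': 'C', 'asp': 'D'}              ## 3-letter -> 1-letter
aa_1to3 = {one: three for three, one in aa_3to1.items()}    ## inverted mapping

def single2long(seq):
    """Convert a 1-letter amino acid string into a list of 3-letter codes."""
    result = []
    for aa in seq:
        code = aa_1to3.get(aa.upper())
        result.append(code.upper() if code else 'Xaa')       ## 'Xaa' marks unknowns
    return result

print(single2long('ACDZ'))   ## ['ALA', 'CYS', 'ASP', 'Xaa']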
Example #3
    def loadResContacts( self ):
        """
        Uncompress residue contact matrix if necessary.
        
        @return: dict with contact matrix and parameters OR None
        @rtype: dict OR None
        """
        ## Backwards compatibility
        if self.contacts is not None and type( self.contacts ) == str:
            self.contacts = t.load( self.contacts )
            EHandler.warning("loading old-style pickled contacts.") 
            return self.contacts

        ## New, uncompression from list of indices into raveled array
        if self.contacts is not None and \
           len( N0.shape( self.contacts['result'])) == 1:

            try:
                lenRec, lenLig = self.contacts['shape']
            except KeyError:
                EHandler.warning("uncompressing contacts without shape")
                lenRec = self.rec().lenResidues()
                lenLig = self.lig().lenResidues()

            m = N0.zeros( lenRec * lenLig )
            N0.put( m, self.contacts['result'], 1 )

            self.contacts['result'] = N0.reshape( m, (lenRec, lenLig) )

        return self.contacts
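
A standalone numpy sketch of the same compression scheme (modern numpy in place of Biskit's N0 wrapper): the binary contact matrix is stored as the flat indices of its non-zero entries plus a shape, and rebuilt with put and reshape.

import numpy as np

contacts = np.array([[0, 1, 0],
                     [1, 0, 0]])

## compress: flat indices of the contacts plus the original shape
packed = {'result': np.flatnonzero(contacts), 'shape': contacts.shape}

## decompress: scatter ones back into a flat array, then reshape
lenRec, lenLig = packed['shape']
m = np.zeros(lenRec * lenLig, dtype=int)
np.put(m, packed['result'], 1)
restored = m.reshape((lenRec, lenLig))

assert (restored == contacts).all()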
Example #4
    def loadResContacts(self):
        """
        Uncompress residue contact matrix if necessary.
        
        @return: dict with contact matrix and parameters OR None
        @rtype: dict OR None
        """
        ## Backwards compatibility
        if self.contacts is not None and type(self.contacts) == str:
            self.contacts = t.load(self.contacts)
            EHandler.warning("loading old-style pickled contacts.")
            return self.contacts

        ## New, uncompression from list of indices into raveled array
        if self.contacts is not None and \
           len( N0.shape( self.contacts['result'])) == 1:

            try:
                lenRec, lenLig = self.contacts['shape']
            except KeyError:
                EHandler.warning("uncompressing contacts without shape")
                lenRec = self.rec().lenResidues()
                lenLig = self.lig().lenResidues()

            m = N0.zeros(lenRec * lenLig)
            N0.put(m, self.contacts['result'], 1)

            self.contacts['result'] = N0.reshape(m, (lenRec, lenLig))

        return self.contacts
Example #5
    def finish( self ):
        """
        Overrides Executor method
        """
        Executor.finish( self )

        self.result = self.parse_result()

        ## if the probe radius is other than 1.4 A, the relative surface exposure
        ## cannot be calculated, but allow this check to be a little flexible
        ## in case we are forced to slightly increase the radii to escape
        ## round-off SurfaceRacer errors
        try:
            if round(self.probe, 1) == 1.4 and self.vdw_set == 1:
                self.__relExposure('MS')
                self.__relExposure('AS')
            else:
                EHandler.warning("No relative accessabilities calculated "+\
                                 "when using a prob radius other than 1.4 A"+\
                                 " or not using the Richards vdw radii set.")
        except KeyError as what:
            EHandler.warning("Missing standard accessibilities for some "+\
                             "atoms. No relative accesibilities calculated.")
            if 'relMS' in self.result: del self.result['relMS']
            if 'relAS' in self.result: del self.result['relAS']
Example #6
 def __defaults(self):
     self.models = getattr(self, 'models', ComplexModelRegistry())
     if getattr(self, 'rec_models', 0) != 0:
         EHandler.warning(
             're-creating model registry..re-pickle this list!')
         for c in self.toList():
             self.models.addComplex(c)
         del self.rec_models
         del self.lig_models
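
The getattr trick in isolation: a hypothetical container gains a new attribute after unpickling an old instance, migrates the legacy field, and then deletes it (OldStyleList, Registry and old_items are made-up names, not Biskit's ComplexList):

class Registry:
    """Toy stand-in for ComplexModelRegistry."""
    def __init__(self):
        self.items = []
    def add(self, item):
        self.items.append(item)

class OldStyleList:
    def __defaults(self):
        ## create the attribute if this instance was pickled before it existed
        self.models = getattr(self, 'models', Registry())
        ## migrate a legacy attribute into the new registry, then drop it
        if getattr(self, 'old_items', None) is not None:
            print('warning: re-creating model registry.. re-pickle this list!')
            for item in self.old_items:
                self.models.add(item)
            del self.old_items

    def __setstate__(self, state):
        self.__dict__.update(state)
        self.__defaults()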
Example #7
 def __defaults( self ):
     self.models = getattr( self, 'models', ComplexModelRegistry() )
     if getattr( self, 'rec_models', 0) != 0:
         EHandler.warning(
             're-creating model registry..re-pickle this list!')
         for c in self.toList():
             self.models.addComplex( c )
         del self.rec_models
         del self.lig_models
Example #8
    def __getstate__(self):
        """
        Called before pickling the object.
        """
        try:
            if type( self.frames ) == list or self.frames.dtype.char == 'd':
                EHandler.warning("Converting coordinates to float array.")
                self.frames = N0.array( self.frames ).astype(N0.Float32)
        except:
            EHandler.warning('Could not convert frames to float array.', 1)

        return self.__dict__
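
A self-contained variation on this pickling hook using modern numpy (hypothetical Trajectory class): coordinates are downcast to float32 in a copied state dict just before pickling, which roughly halves the size of the stored frames.

import pickle
import numpy as np

class Trajectory:
    def __init__(self, frames):
        self.frames = frames                      ## list of frames or ndarray

    def __getstate__(self):
        state = self.__dict__.copy()
        try:
            frames = np.asarray(state['frames'])
            if frames.dtype == np.float64:
                state['frames'] = frames.astype(np.float32)
        except Exception:
            print('warning: could not convert frames to a float array')
        return state

t = Trajectory([[0.0, 1.0, 2.0]])
print(len(pickle.dumps(t)))                       ## pickles the float32 copy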
Example #9
    def writePdb( self, index, fname):
        """
        Write (possibly transformed) coordinates back to pdb.

        :param index: frame index in trajectory
        :type  index: int
        :param fname: name of new file
        :type  fname: str 
        """
        try:
            self.getPDBModel( index ).writePdb( fname )
        except:
            EHandler.error('Error writing %s.' % fname)
Example #10
    def __substitute( self, fragments, name, value ):
        """
        Look through all not-yet-substituted fragments for parts that can be
        replaced by value and, if successful, split them into new fragments.
        
        :param fragments: fragment tuples
        :type  fragments: [ (str, str) ]
        :param name: substitution variable name
        :type  name: str
        :param value: substitution value in the current environment
        :type  value: str
        
        :return: fragment tuples
        :rtype: [ (str, str) ]
        """
        result = []

        try:
            for abs, subst in fragments:

                if not subst:   ## unsubstituted fragment

##                     pos = abs.find( value )
                    pos = self.__find_subpath( abs, value )

                    if pos != -1:
                        end = pos + len( value )

                        f1, f2, f3 = abs[0:pos], abs[pos:end], abs[end:]

                        if f1:
                            result += [ (f1, None) ] ## unsubstituted head
                        result += [ (f2, name) ]     ## new substitution
                        if f3:
                            result += [ (f3, None) ] ## unsubstituted tail

                    else:
                        result += [ (abs, subst) ]
                else:
                    result += [ (abs, subst ) ]
        except OSError as why:
            EHandler.fatal("Substituting path fragments: \n" +
                                 str( fragments ) + '\nname: ' + str( name ) +
                                 '\nvalue:' + str( value ) )

        return result
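
The fragment-substitution idea stripped to a standalone function (illustrative names; a plain substring search stands in for the path-aware __find_subpath): a path is kept as a list of (text, variable) pairs, and each unsubstituted piece is split wherever the variable's value occurs.

def substitute(fragments, name, value):
    """Split unsubstituted (text, None) pairs around value, tagging the match with name."""
    result = []
    for text, var in fragments:
        pos = text.find(value) if var is None else -1
        if pos == -1:
            result.append((text, var))
            continue
        end = pos + len(value)
        head, match, tail = text[:pos], text[pos:end], text[end:]
        if head:
            result.append((head, None))      ## unsubstituted head
        result.append((match, name))         ## new substitution
        if tail:
            result.append((tail, None))      ## unsubstituted tail
    return result

frags = [('/home/alice/projects/data.txt', None)]
print(substitute(frags, 'HOME', '/home/alice'))
## [('/home/alice', 'HOME'), ('/projects/data.txt', None)]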
Example #11
    def __substitute(self, fragments, name, value):
        """
        Look through all not-yet-substituted fragments for parts that can be
        replaced by value and, if successful, split them into new fragments.
        
        :param fragments: fragment tuples
        :type  fragments: [ (str, str) ]
        :param name: substitution variable name
        :type  name: str
        :param value: substitution value in the current environment
        :type  value: str
        
        :return: fragment tuples
        :rtype: [ (str, str) ]
        """
        result = []

        try:
            for abs, subst in fragments:

                if not subst:  ## unsubstituted fragment

                    ##                     pos = abs.find( value )
                    pos = self.__find_subpath(abs, value)

                    if pos != -1:
                        end = pos + len(value)

                        f1, f2, f3 = abs[0:pos], abs[pos:end], abs[end:]

                        if f1:
                            result += [(f1, None)]  ## unsubstituted head
                        result += [(f2, name)]  ## new substitution
                        if f3:
                            result += [(f3, None)]  ## unsubstituted tail

                    else:
                        result += [(abs, subst)]
                else:
                    result += [(abs, subst)]
        except OSError as why:
            EHandler.fatal("Substituting path fragments: \n" + str(fragments) +
                           '\nname: ' + str(name) + '\nvalue:' + str(value))

        return result
Example #12
    def __syncModel( self, new_model, old_model ):
        """
        Connect new rec or lig model to old one, to minimize storage.

        @param new_model: PDBModel / XplorModel
        @type  new_model: PDBModel
        @param old_model: PDBModel / XplorModel
        @type  old_model: PDBModel

        @return: PDBModel / XplorModel, new model that only keeps
                 changes relative to old, the old model becomes the
                 source of the new, if possible
        @rtype: PDBModel
        """
        ## try to fix atom order of new_model so that it is identical to old
        if old_model.equals( new_model ) != [1,1]:
            i_new, i_old = new_model.compareAtoms( old_model )

            if len( i_new ) == len( new_model ):
                new_model.keep( i_new )

        ## create result model that only keeps difference of new and old
        if old_model.equals( new_model ) == [1,1]:

            ## stays compatible with XplorModel.__init__ and PDBModel.__init__
            r = old_model.__class__( source=old_model )

            r.setXyz( new_model.getXyz() )

            ## check for profiles identical to source and adapt 'changed'
            r.update()  

            if not MU.arrayEqual( r.xyz, old_model.xyz ):
                r.removeProfile( 'relASA', 'ASA_sc', 'ASA_total', 'ASA_bb' )

            return r

        EHandler.warning(
            'ComplexVC: Cannot connect new to old PDBModel.')

        new_model.disconnect()
        return new_model
Example #13
    def __syncModel(self, new_model, old_model):
        """
        Connect new rec or lig model to old one, to minimize storage.

        @param new_model: PDBModel / XplorModel
        @type  new_model: PDBModel
        @param old_model: PDBModel / XplorModel
        @type  old_model: PDBModel

        @return: PDBModel / XplorModel, new model that only keeps
                 changes relative to old, the old model becomes the
                 source of the new, if possible
        @rtype: PDBModel
        """
        ## try to fix atom order of new_model so that it is identical to old
        if old_model.equals(new_model) != [1, 1]:
            i_new, i_old = new_model.compareAtoms(old_model)

            if len(i_new) == len(new_model):
                new_model.keep(i_new)

        ## create result model that only keeps difference of new and old
        if old_model.equals(new_model) == [1, 1]:

            ## stays compatible with XplorModel.__init__ and PDBModel.__init__
            r = old_model.__class__(source=old_model)

            r.setXyz(new_model.getXyz())

            ## check for profiles identical to source and adapt 'changed'
            r.update()

            if not MU.arrayEqual(r.xyz, old_model.xyz):
                r.removeProfile('relASA', 'ASA_sc', 'ASA_total', 'ASA_bb')

            return r

        EHandler.warning('ComplexVC: Cannot connect new to old PDBModel.')

        new_model.disconnect()
        return new_model
Example #14
    def validate( self ):
        """
        Validate the path to the binary.
        
        :raise ExeConfigError: if environment is not fit for running
                               the program
        """
        try:
            self.bin = T.absbinary( self.bin ) ## raises IOError if not found

            missing = self.update_environment()
            report = '%s is missing environment variables: %r'\
                     % (self.name, missing )

            if missing and self.strict:
                raise ExeConfigError(report)

            if missing:
                EHandler.warning( report )

        except IOError as e:
            ## re-raise but silence reporting of IOError in stack trace
            raise ExeConfigError(str(e) + ' Check %s!' % self.dat) from None  
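
A minimal standalone analogue of this validation step (ExeConfigError and the variable list below are illustrative, not Biskit's ExeConfig): resolve the executable on the PATH and check required environment variables, raising a single configuration error when something is missing.

import os
import shutil

class ExeConfigError(Exception):
    pass

def validate(binary, required_env=(), strict=True):
    """Return the absolute path of binary; warn or fail on missing env variables."""
    path = shutil.which(binary)
    if path is None:
        raise ExeConfigError('%s not found on the PATH. Check your installation!' % binary)

    missing = [v for v in required_env if v not in os.environ]
    if missing:
        report = '%s is missing environment variables: %r' % (binary, missing)
        if strict:
            raise ExeConfigError(report)
        print('warning:', report)
    return path

print(validate('python3', required_env=['HOME'], strict=False))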
Example #15
    def concat( self, *traj ):
        """
        Concatenate this with other trajectories. The ref model of the
        new Trajectory is a 'semi-deep' copy of this trajectory's model.
        (see :class:`PDBModel.take()` )::
           concat( traj [, traj2, traj3, ..] ) -> Trajectory 

        :param traj: one or more Trajectory with identical atoms as this one
        :type  traj: Trajectories

        :return: concatenated trajectories
        :rtype: Trajectory
        """
        if len( traj ) == 0:
            return self

        r = self.__class__()

        r.frames = N0.concatenate( (self.frames, traj[0].frames), 0 )

        r.setRef( self.ref.clone())

        if self.frameNames and traj[0].frameNames:
            r.frameNames = self.frameNames + traj[0].frameNames

        try:
            if self.pc is not None and traj[0].pc is not None:
                r.pc['p'] = N0.concatenate( (self.pc['p'], traj[0].pc['p']),0)
                r.pc['u'] = N0.concatenate( (self.pc['u'], traj[0].pc['u']),0)
        except TypeError as why:
            EHandler.error('cannot concat PC '+str(why) )

        r.profiles = self.profiles.concat( traj[0].profiles )

        ## recursively add other trajectories
        return r.concat( *traj[1:] )
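
The core of the concatenation reduced to plain numpy (illustrative class, not Biskit's Trajectory): frames are stacked along the first axis, frame names are appended, and the remaining trajectories are folded in recursively.

import numpy as np

class Traj:
    def __init__(self, frames, names):
        self.frames = np.asarray(frames, dtype=np.float32)   ## (n_frames, n_atoms, 3)
        self.names = list(names)

    def concat(self, *traj):
        if len(traj) == 0:
            return self
        r = Traj(np.concatenate((self.frames, traj[0].frames), axis=0),
                 self.names + traj[0].names)
        return r.concat(*traj[1:])                            ## recurse over the rest

a = Traj(np.zeros((2, 3, 3)), ['a0', 'a1'])
b = Traj(np.ones((1, 3, 3)), ['b0'])
ab = a.concat(b)
print(ab.frames.shape, ab.names)                              ## (3, 3, 3) ['a0', 'a1', 'b0']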
Example #16
 def _cease( self, ref ):
     try:
         self.alive = False
     except:
         EHandler.warning('error in CrossView._cease')
         pass
Example #17
    def concat(self, *profiles):
        """
        Concatenate all profiles in this with corresponding profiles in the
        given ProfileCollection(s). Profiles that are not found in all
        ProfileCollections are skipped::
          p0.concat( p1 [, p2, ..]) -> single ProfileCollection with the
          same number of profiles as p0 but with the length of p0+p1+p2..

        :param profiles: profile(s) to concatenate
        :type  profiles: ProfileCollection(s)
        
        :return: concatenated profile(s)  
        :rtype: ProfileCollection / subclass
        """
        ## end recursion (no more arguments)
        if len(profiles) == 0:
            return self

        next = profiles[0]

        r = self.__class__()

        ##!!! BIG FAT WARNING: empty profilecollection does not imply empty model
        ## an empty PC w/o any profiles currently doesn't know which length
        ## it is supposed to have. If profLength == 0 for real, then
        ## the next PC's profiles don't need to be skipped
        ## Otherwise,
        ## this creates too-short profiles if the PC parent model has
        ## non-zero length and simply doesn't have any profiles registered.

        ##        ## special case 1: concat something to empty profile collection
        ##        if not self.keys():
        ##            return next.clone().concat( *profiles[1:] )
        ##
        ##        ## special case 2: concat empty profile collection to this one
        ##        if not next.keys():
        ##            return self.clone().concat( *profiles[1:] )
        ##
        allkeys = M.union(list(self.profiles.keys()), list(next.keys()))

        ##        for k, p in self.profiles.items():
        for k in allkeys:
            p = self.profiles.get(k, None)
            pnext = next.profiles.get(k, None)
            infos = {}

            if p is None:
                default = next[k, 'default']
                p = self.__clonedefault(pnext, self.profLength(), default)
                infos = next.infos[k]

            if pnext is None:
                default = self[k, 'default']
                pnext = self.__clonedefault(p, next.profLength(), default)
                infos = self.infos[k]

            try:
                if isinstance(p, N.ndarray):

                    if len(pnext) == 0:
                        pnext = pnext.astype(p.dtype)

                    r.set(k, N.concatenate((p, pnext)), **infos)

                else:
                    r.set(k, p + pnext, **infos)
            except:
                EHandler.warning("Profile %s skipped during concat." % k,
                                 error=1)
                r.remove(k)

        return r.concat(*profiles[1:])
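
A simplified standalone version of the per-key concatenation (plain dicts instead of a ProfileCollection): keys present in either collection are kept, and a profile missing on one side is padded with a default so lengths stay consistent.

import numpy as np

def concat_profiles(a, b, len_a, len_b, default=0):
    """Concatenate dict-of-arrays a (length len_a) with b (length len_b)."""
    result = {}
    for key in set(a) | set(b):
        pa = np.asarray(a.get(key, [default] * len_a))
        pb = np.asarray(b.get(key, [default] * len_b))
        result[key] = np.concatenate((pa, pb))
    return result

p0 = {'mass': [12.0, 14.0], 'charge': [0, -1]}
p1 = {'mass': [16.0]}                        ## 'charge' missing here
print(concat_profiles(p0, p1, 2, 1))         ## charge padded with the default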
Example #18
 def _cease(self, ref):
     try:
         self.alive = False
     except:
         EHandler.warning('error in CrossView._cease')
         pass
Example #19
    def concat( self, *profiles ):
        """
        Concatenate all profiles in this with corresponding profiles in the
        given ProfileCollection(s). Profiles that are not found in all
        ProfileCollections are skipped::
          p0.concat( p1 [, p2, ..]) -> single ProfileCollection with the
          same number of profiles as p0 but with the length of p0+p1+p2..

        :param profiles: profile(s) to concatenate
        :type  profiles: ProfileCollection(s)
        
        :return: concatenated profile(s)  
        :rtype: ProfileCollection / subclass
        """
        ## end recursion (no more arguments)
        if len( profiles ) == 0:
            return self

        next = profiles[0]

        r = self.__class__()
        
        ##!!! BIG FAT WARNING: empty profilecollection does not imply empty model
        ## an empty PC w/o any profiles currently doesn't know which length
        ## it is supposed to have. If profLength == 0 for real, then
        ## the next PC's profiles don't need to be skipped
        ## Otherwise,
        ## this creates too-short profiles if the PC parent model has 
        ## non-zero length and simply doesn't have any profiles registered.
        
##        ## special case 1: concat something to empty profile collection
##        if not self.keys():
##            return next.clone().concat( *profiles[1:] )
##
##        ## special case 2: concat empty profile collection to this one
##        if not next.keys():
##            return self.clone().concat( *profiles[1:] )
##                
        allkeys = M.union( list(self.profiles.keys()), list(next.keys()) )

##        for k, p in self.profiles.items():
        for k in allkeys:
            p = self.profiles.get(k, None)
            pnext = next.profiles.get(k, None)
            infos = {}
            
            if p is None:
                default = next[k,'default']
                p = self.__clonedefault(pnext, self.profLength(), default)
                infos = next.infos[k]
                
            if pnext is None:
                default = self[k,'default']
                pnext = self.__clonedefault(p, next.profLength(), default)
                infos = self.infos[k]

            try:
                if isinstance( p, N.ndarray ):
                    
                    if len(pnext) == 0:
                        pnext = pnext.astype(p.dtype)
                        
                    r.set( k, N.concatenate( (p, pnext) ), **infos )
                    
                else:
                    r.set( k, p + pnext, **infos )
            except:
                EHandler.warning("Profile %s skipped during concat." % k, 
                                 error=1)
                r.remove( k )

        return r.concat( *profiles[1:] )
Example #20
"""
Protein-protein docking related modules
"""
from biskit import EHandler

try:
    from .complex import Complex, ComplexError
    from .complexList import ComplexList, ComplexListError
    from .complexModelRegistry import ComplexModelRegistry, RegistryError
    from .complexvc import ComplexVC
    from .complexvcList import ComplexVCList
    from .complextraj import ComplexTraj, ComplexTrajError
    from .complexrandomizer import ComplexRandomizer, ComplexMinimizer
    from .docker import Docker, DockerError
##    from FixedList import FixedList
##    from HexParser import HexParser
    from .delphiBindingEnergy import DelphiBindingEnergy

##     from Intervor import Intervor
##     from PatchGenerator import PatchGenerator
##     from PatchGeneratorFromOrbit import PatchGeneratorFromOrbit

except (ImportError, IOError) as why:
    EHandler.warning("Couldn't import all biskit.dock modules.\n" + str(why))
Example #21
    def concatEnsembles( self, *traj ):
        """
        Concatenate this with other trajectories in a zig-zag manner,
        resulting in an ensembleTraj with additional members.
        The ref model of the new Trajectory is a 'semi-deep' copy of this
        trajectory's model (see :class:`PDBModel.take()` )::
          concat( traj [, traj2, traj3, ..] ) -> Trajectory
        
        :param traj: with identical atoms as this one
        :type  traj: one or more EnsembleTrajectory

        @todo: fix so that pc and profiles are not lost
        """
        if len( traj ) == 0:
            return self

        r = self.__class__( n_members = self.n_members + traj[0].n_members )

        min_members = min( self.n_members, traj[0].n_members )
        min_frames = min( self.lenFrames(), traj[0].lenFrames() )

        steps = self.lenFrames()//self.n_members + \
                traj[0].lenFrames()//traj[0].n_members

        def __everyOther( traj_0, traj_1, list_0, list_1, minMembers,
                          minFrames, loops ):
            result = []
            for j in range( 0, minMembers//2 ):

                for i in range( j*loops , j*loops + minFrames*2//minMembers ):
                    result += [ list_0[i] ]
                    result += [ list_1[i] ]

                while i < j*traj_0.n_members:
                    result += [ list_0[i] ]
                    i += 1

                while i < j*traj_1.n_members:
                    result += [ list_1[i] ]
                    i += 1

            return result

        frames = __everyOther( self, traj[0], self.frames,
                               traj[0].frames, min_members,
                               min_frames, steps )

        r.frames = N0.array(frames) 
        r.setRef( self.ref.clone())

        if self.frameNames and traj[0].frameNames:
            r.frameNames =  __everyOther( self, traj[0], self.frameNames,
                                          traj[0].frameNames, min_members,
                                          min_frames, steps )
        try:
            # NOT TESTED!!
            if self.pc and traj[0].pc:
                r.pc['p'] =  __everyOther( self, traj[0], self.pc['p'],
                               traj[0].pc['p'], min_members, min_frames, steps )

                r.pc['u'] =  __everyOther( self, traj[0], self.pc['u'],
                               traj[0].pc['u'], min_members, min_frames, steps )

#                r.pc['p'] = N0.concatenate( (self.pc['p'], traj[0].pc['p']),0)
#                r.pc['u'] = N0.concatenate( (self.pc['u'], traj[0].pc['u']),0)
        except TypeError as why:
            EHandler.error('cannot concat PC '+str(why) )

#        r.profiles = self.profiles.concat( traj[0].profiles )

        ## recursively add other trajectories
        return r.concat( *traj[1:] )
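
The zig-zag interleaving on its own, for two ensembles with matching frame counts (plain numpy; the member bookkeeping of the method above is left out): frames from the two trajectories simply alternate.

import numpy as np

def interleave(frames_a, frames_b):
    """Alternate frames from two ensembles: a0, b0, a1, b1, ..."""
    out = []
    for fa, fb in zip(frames_a, frames_b):
        out.append(fa)
        out.append(fb)
    return np.array(out)

a = np.zeros((2, 3, 3))                  ## 2 frames x 3 atoms x xyz
b = np.ones((2, 3, 3))
print(interleave(a, b).shape)            ## (4, 3, 3)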