# Example #1
def get_secstruct(pose):
    """Return the DSSP secondary-structure string for *pose*.

    Runs DSSP on the pose, inserts the assignment back into the pose,
    and returns the pose's secondary-structure string.
    """
    dssp = Dssp(pose)
    dssp.insert_ss_into_pose(pose)
    return str(pose.secstruct())
 def __init__(self, pdbid, dist=8.0):
     """Load a structure from the local PDB mirror and compute its
     secondary structure.

     Args:
         pdbid: PDB identifier; used to locate the gzipped coordinate
             file in the mirror (sharded by characters 2-3 of the id).
         dist: Maximum distance between two residues for them to be
             considered part of an interface.
     """
     self.dist = dist
     self.pdbid = pdbid
     pdb_prefix = '/wynton/home/database/pdb/remediated/pdb/'
     filename = 'pdb{}.ent.gz'.format(self.pdbid)
     # Mirror layout: <prefix>/<id[1:3]>/pdb<id>.ent.gz
     self.path = os.path.join(pdb_prefix, self.pdbid[1:3], filename)
     self.pose = pose_from_file(self.path)
     dssp_string = Dssp(self.pose).get_dssp_secstruct()
     self.secstruct = contiguous_secstruct(dssp_string)
# Example #3
 def bblock(self, pdbkey):
     """Return the BBlock for *pdbkey*, building and caching it on first use.

     Args:
         pdbkey: An int cache key, a str/bytes pdb identifier (hashed to
             an int via hash_str_to_int), or a list of any of these
             (handled element-wise by recursion).

     Returns:
         The cached BBlock, or a list of BBlocks when *pdbkey* is a list.
     """
     if isinstance(pdbkey, list):
         return [self.bblock(f) for f in pdbkey]
     if isinstance(pdbkey, (str, bytes)):
         pdbkey = hash_str_to_int(pdbkey)
     assert isinstance(pdbkey, int)
     # idiomatic 'not in' (was 'not pdbkey in ...')
     if pdbkey not in self._bblock_cache:
         pdbfile = self._key_to_pdbfile[pdbkey]
         pose = self.pose(pdbfile)
         entry = self._dictdb[pdbfile]
         ss = Dssp(pose).get_dssp_secstruct()
         bblock = BBlock(entry, pdbfile, pdbkey, pose, ss)
         self._bblock_cache[pdbkey] = bblock
     return self._bblock_cache[pdbkey]
# Example #4
    def build_pdb_data(self, entry, uselock=True):
        """Load or build the cached BBlock (and optionally pose) for one entry.

        Args:
            entry: Database record; entry['file'] is the pdb file path.
            uselock: When True, verify this process holds the cache-dir
                lock before writing new cache files.

        Returns:
            (new, missing) tuple: (None, None) when data was already
            cached, (pdbfile, None) when a new cache entry was built,
            (None, pdbfile) when nothing is cached and reading new
            pdbs is disabled.

        Raises:
            ValueError: A cache file exists (or was removed concurrently)
                but its data could not be loaded for the derived key.
        """
        pdbfile = entry['file']
        pdbkey = hash_str_to_int(pdbfile)
        cachefile = self.bblockfile(pdbkey)
        posefile = self.posefile(pdbfile)
        if os.path.exists(cachefile):
            if not self.load_cached_bblock_into_memory(pdbkey):
                # Distinguish a corrupt/unloadable cache file from one
                # deleted between the exists() check and the load attempt.
                if os.path.exists(cachefile):
                    raise ValueError(
                        f'cachefile {cachefile} exists, but cant load data from associated key {pdbkey}'
                    )
                raise ValueError(
                    f'cachefile {cachefile} was removed, cant load data from associated key {pdbkey}'
                )
            if self.load_poses:
                if not self.load_cached_pose_into_memory(pdbfile):
                    print('warning, not saved:', pdbfile)
            return None, None  # new, missing
        elif self.read_new_pdbs:
            if uselock: self.check_lock_cachedir()
            pose = self.pose(pdbfile)
            ss = Dssp(pose).get_dssp_secstruct()
            bblock = BBlock(entry, pdbfile, pdbkey, pose, ss)
            self._bblock_cache[pdbkey] = bblock
            with open(cachefile, 'wb') as f:
                pickle.dump(bblock._state, f)
            if not os.path.exists(posefile):
                try:
                    with open(posefile, 'wb') as f:
                        pickle.dump(pose, f)
                        info('dumped _bblock_cache files for %s' % pdbfile)
                except OSError:
                    # Best-effort: the pose cache is optional, so a failed
                    # write is reported but not fatal.
                    print('not saving', posefile)

            if self.load_poses:
                self._poses_cache[pdbfile] = pose
            return pdbfile, None  # new, missing
        else:
            warning('no cached data for: ' + pdbfile)
            return None, pdbfile  # new, missing
# Example #5
    def build_pdb_data(self, entry):
        """Load or build the cached BBlock (and optionally pose) for one entry.

        Args:
            entry: Database record; entry['file'] is the pdb file path.

        Returns:
            (new, missing) tuple: (None, None) when data was already
            cached, (pdbfile, None) when a new cache entry was built,
            (None, pdbfile) when nothing is cached and reading new
            pdbs is disabled.
        """
        pdbfile = entry['file']
        cachefile = os.path.join(self.cachedir, 'bblock',
                                 flatten_path(pdbfile))
        posefile = self.posefile(pdbfile)
        if os.path.exists(cachefile):
            # NOTE(review): these asserts are stripped under 'python -O';
            # raise explicitly if cache-load failures must always surface.
            assert self.load_cached_bblock_into_memory(pdbfile)
            if self.load_poses:
                assert self.load_cached_pose_into_memory(pdbfile)
            return None, None  # new, missing
        elif self.read_new_pdbs:
            self.check_lock_cachedir()
            pose = self.pose(pdbfile)
            ss = Dssp(pose).get_dssp_secstruct()
            bblock = BBlock(entry, pdbfile, pose, ss)
            self._bblock_cache[pdbfile] = bblock

            with open(cachefile, 'wb') as f:
                pickle.dump(bblock._state, f)
            if not os.path.exists(posefile):
                with open(posefile, 'wb') as f:
                    pickle.dump(pose, f)
                    info('dumped _bblock_cache files for %s' % pdbfile)

            if self.load_poses:
                self._poses_cache[pdbfile] = pose
            return pdbfile, None  # new, missing
        else:
            warning('no cached data for: ' + pdbfile)
            return None, pdbfile  # new, missing
# Example #6
 def __getitem__(self, index):
     """Load the pose at *index* and return a (transformed pose,
     sequence, unreduced DSSP secondary-structure) triple."""
     raw_pose = pose_from_file(self.data[index])
     xformed = self.transform(raw_pose)
     dssp = Dssp(xformed)
     ss = dssp.get_dssp_unreduced_secstruct()
     return (xformed, raw_pose.sequence(), ss)