def load_cached_bblock_into_memory(self, pdbkey, cache_replace=True):
    """Load the pickled _BBlock for *pdbkey* from its cachefile into self._bblock_cache.

    If the cached state is stale (its stored json header no longer matches the
    current database entry) and cache_replace is True, the cachefile is removed,
    the pdb is re-processed, and loading is retried once (with
    cache_replace=False to prevent infinite recursion).

    Args:
        pdbkey: an int key, or an iterable of keys (str/bytes are rejected by
            the assert below); for an iterable, every key is loaded and the
            combined success is returned.
        cache_replace (bool): whether to rebuild a stale cachefile.

    Returns:
        bool: True if the bblock(s) ended up in the cache, False otherwise.
    """
    assert not isinstance(pdbkey, (str, bytes))
    if not isinstance(pdbkey, (int, str)):
        # iterable of keys: recurse per key, AND the results together
        success = True
        for f in pdbkey:
            success &= self.load_cached_bblock_into_memory(f)
        return success
    bblockfile = self.bblockfile(pdbkey)
    if not os.path.exists(bblockfile):
        # print(f'warning: bblock cachefile not found {bblockfile}')
        return False
    with open(bblockfile, 'rb') as f:
        bbstate = list(pickle.load(f))
    # bbstate[0] holds the json-encoded database entry captured at cache time;
    # compare against the current entry to detect staleness
    entry = self._dictdb[self._key_to_pdbfile[pdbkey]]
    newjson = json.dumps(entry).encode()
    if bytes(bbstate[0]) == newjson:
        self._bblock_cache[pdbkey] = _BBlock(*bbstate)
        return True
    print('!!! database entry updated for key', pdbkey, entry['file'])
    if cache_replace:
        print(' removing cachefile', bblockfile)
        os.remove(bblockfile)
        print(' reloading info cache', entry['file'])
        self.load_pdbs_multiprocess([entry['file']], parallel=0)
        # retry once; cache_replace=False guarantees termination
        return self.load_cached_bblock_into_memory(pdbkey, cache_replace=False)
    return False
def worms_main_protocol(criteria, bbs_states=None, **kw):
    """Run one merge-bblock job: search, prune clashes, check geometry, output.

    Args:
        criteria: search criteria object forwarded to each pipeline stage.
        bbs_states: optional pickled bblock states; when given, they are
            rebuilt into _BBlock tuples and injected as kw['bbs'].
        **kw: pipeline options; must contain 'pbar' and 'merge_bblock'.

    Returns:
        list: accumulated log lines, or [] when the search yields nothing
        or any stage raises (the error is printed, never propagated).
    """
    try:
        if bbs_states is not None:
            kw['bbs'] = [tuple(_BBlock(*state) for state in bb) for bb in bbs_states]
        ssdag, result1, log = search_func(criteria, **kw)
        if result1 is None:
            return []
        pruned = prune_clashes(ssdag, criteria, result1, **kw)
        result3 = check_geometry(ssdag, criteria, pruned, **kw)
        # discard the search log; report the post-filter count
        log = []
        # (was guarded by len(result3.idx) > 0 -- guard disabled)
        msg = f'nresults after clash/geom check {len(result3.idx):,}'
        log.append(' ' + msg)
        print(log[-1])
        log += filter_and_output_results(criteria, ssdag, result3, **kw)
        if not kw['pbar']:
            print(f'completed: mbb{kw["merge_bblock"]:04}')
            sys.stdout.flush()
        return log
    except Exception as err:
        # top-level worker boundary: report everything, never raise
        print('error on mbb' + str(kw['merge_bblock']))
        print(type(err))
        print(traceback.format_exc())
        print(err)
        sys.stdout.flush()
        return []
def __setstate__(self, state):
    """Restore this graph from its pickled 4-part state.

    state[0] is the bbspec, state[1..3] hold constructor-argument tuples
    for the bblocks (nested per segment), vertices, and edges.
    """
    self.bbspec = state[0]
    self.bbs = tuple(
        tuple(_BBlock(*args) for args in segment) for segment in state[1]
    )
    self.verts = tuple(_Vertex(*args) for args in state[2])
    self.edges = tuple(_Edge(*args) for args in state[3])
    _validate_bbs_verts(self.bbs, self.verts)
    # a linear path: one fewer edge than vertices/segments
    assert len(self.bbs) == len(self.verts) == len(self.edges) + 1
def load_cached_bblock_into_memory(self, pdbfile):
    """Load the pickled _BBlock cachefile for *pdbfile* into self._bblock_cache.

    Args:
        pdbfile: a pdb file path (str), or an iterable of paths, in which
            case each one is loaded and the combined success is returned.

    Returns:
        bool: True if the cachefile(s) were loaded, False if one was missing.
    """
    if not isinstance(pdbfile, str):
        # iterable of files: recurse per file, AND the results together
        success = True
        for f in pdbfile:
            success &= self.load_cached_bblock_into_memory(f)
        return success
    bblockfile = self.bblockfile(pdbfile)
    try:
        with open(bblockfile, 'rb') as f:
            bbstate = list(pickle.load(f))
        self._bblock_cache[pdbfile] = _BBlock(*bbstate)
        return True
    # BUG FIX: was 'except FileNotFound:' -- that name does not exist, so a
    # missing cachefile raised NameError instead of returning False.
    except FileNotFoundError:
        return False
def vertex_single(bbstate, bbid, din, dout, min_seg_len):
    """Build one bblock's worth of vertex data: all valid in/out residue pairs.

    Args:
        bbstate: pickled state tuple used to reconstruct a _BBlock.
        bbid: integer bblock id stamped onto every output row.
        din: required entry direction (0/1 = 'N'/'C', 2 = no entry).
        dout: required exit direction (0/1 = 'N'/'C', 2 = no exit).
        min_seg_len: minimum sequence separation for same-chain pairs.

    Returns:
        Tuple (x2exit, x2orig, ires, isite, chain, ibblock) of arrays over
        the valid pairs, or None if the bblock has no usable connections
        or no pair survives filtering.
    """
    bb = _BBlock(*bbstate)
    ires0, ires1 = [], []
    isite0, isite1 = [], []
    # collect entry (ires0) and exit (ires1) residues per connection site
    for i in range(bb.n_connections):
        ires = bb.conn_resids(i)
        if bb.conn_dirn(i) == din:
            ires0.append(ires)
            isite0.append(np.repeat(i, len(ires)))
        if bb.conn_dirn(i) == dout:
            ires1.append(ires)
            isite1.append(np.repeat(i, len(ires)))
    if (din < 2 and not ires0 or dout < 2 and not ires1):
        dirn = 'NC_'[din] + 'NC_'[dout]
        warning('invalid vertex ' + dirn + ' ' + bytes(bb.file).decode())
        return None
    # a lone -1 stands in for "no connection required on this side"
    dummy = [np.array([-1], dtype='i4')]
    ires0 = np.concatenate(ires0 or dummy)
    ires1 = np.concatenate(ires1 or dummy)
    isite0 = np.concatenate(isite0 or dummy)
    isite1 = np.concatenate(isite1 or dummy)
    chain0 = chain_of_ires(bb, ires0)
    chain1 = chain_of_ires(bb, ires1)
    # BUG FIX: was 'len(ires0) is 1' / 'len(ires1) is 1' -- identity
    # comparison with an int literal is implementation-dependent and a
    # SyntaxWarning on modern CPython; use equality.
    if ires0[0] == -1:
        assert len(ires0) == 1
    else:
        assert np.all(ires0 >= 0)
    if ires1[0] == -1:
        assert len(ires1) == 1
    else:
        assert np.all(ires1 >= 0)
    # identity stub on the dummy side, real inverse stubs otherwise
    if ires0[0] == -1:
        stub0inv = np.eye(4).reshape(1, 4, 4)
    else:
        stub0inv = np.linalg.inv(bb.stubs[ires0])
    if ires1[0] == -1:
        stub1 = np.eye(4).reshape(1, 4, 4)
    else:
        stub1 = bb.stubs[ires1]
    # broadcast to the full (n_in, n_out) cross product of entry/exit pairs
    stub0inv, stub1 = np.broadcast_arrays(stub0inv[:, None], stub1)
    ires = np.stack(np.broadcast_arrays(ires0[:, None], ires1), axis=-1)
    isite = np.stack(np.broadcast_arrays(isite0[:, None], isite1), axis=-1)
    chain = np.stack(np.broadcast_arrays(chain0[:, None], chain1), axis=-1)
    x2exit = stub0inv @ stub1
    x2orig = stub0inv
    assert is_homog_xform(x2exit)  # this could be slowish
    assert is_homog_xform(x2orig)
    # min chain len, not same site
    not_same_chain = chain[..., 0] != chain[..., 1]
    not_same_site = isite[..., 0] != isite[..., 1]
    seqsep = np.abs(ires[..., 0] - ires[..., 1])
    # remove invalid in/out pairs (+ is or, * is and)
    valid = not_same_site
    valid *= (not_same_chain + (seqsep >= min_seg_len))
    valid = valid.reshape(-1)
    if np.sum(valid) == 0:
        return None
    return (
        x2exit.reshape(-1, 4, 4)[valid],
        x2orig.reshape(-1, 4, 4)[valid],
        ires.reshape(-1, 2)[valid].astype('i4'),
        isite.reshape(-1, 2)[valid].astype('i4'),
        chain.reshape(-1, 2)[valid].astype('i4'),
        np.repeat(bbid, np.sum(valid)).astype('i4'),
    )
def vertex_single(bbstate, bbid, din, dout, min_seg_len, verbosity=0):
    """Build one bblock's worth of vertex data: all valid in/out residue pairs.

    Args:
        bbstate: pickled state tuple used to reconstruct a _BBlock.
        bbid: integer bblock id stamped onto every output row.
        din: required entry direction (0/1 = 'N'/'C', 2 = no entry).
        dout: required exit direction (0/1 = 'N'/'C', 2 = no exit).
        min_seg_len: minimum sequence separation for same-chain pairs.
        verbosity: if > 0, warn when the bblock lacks required connections.

    Returns:
        Tuple (x2exit, x2orig, ires, isite, chain, ibblock) of arrays over
        the valid pairs, or None if the bblock has no usable connections
        or no pair survives filtering.
    """
    bb = _BBlock(*bbstate)
    ires0, ires1 = [], []
    isite0, isite1 = [], []
    # collect entry (ires0) and exit (ires1) residues per connection site
    for i in range(bb.n_connections):
        ires = bb.conn_resids(i)
        if bb.conn_dirn(i) == din:
            ires0.append(ires)
            isite0.append(np.repeat(i, len(ires)))
        if bb.conn_dirn(i) == dout:
            ires1.append(ires)
            isite1.append(np.repeat(i, len(ires)))
    dirn = "NC_"[din] + "NC_"[dout]
    if din < 2 and not ires0 or dout < 2 and not ires1:
        if verbosity > 0:
            warning("invalid vertex " + dirn + " " + bytes(bb.file).decode())
        return None
    # a lone -1 stands in for "no connection required on this side"
    dummy = [np.array([-1], dtype="i4")]
    ires0 = np.concatenate(ires0 or dummy)
    ires1 = np.concatenate(ires1 or dummy)
    isite0 = np.concatenate(isite0 or dummy)
    isite1 = np.concatenate(isite1 or dummy)
    chain0 = chain_of_ires(bb, ires0)
    chain1 = chain_of_ires(bb, ires1)
    # BUG FIX: was 'len(ires0) is 1' / 'len(ires1) is 1' -- identity
    # comparison with an int literal is implementation-dependent and a
    # SyntaxWarning on modern CPython; use equality.
    if ires0[0] == -1:
        assert len(ires0) == 1
    else:
        assert np.all(ires0 >= 0)
    if ires1[0] == -1:
        assert len(ires1) == 1
    else:
        assert np.all(ires1 >= 0)
    # identity stub on the dummy side, real inverse stubs otherwise
    if ires0[0] == -1:
        stub0inv = np.eye(4).reshape(1, 4, 4)
    else:
        stub0inv = np.linalg.inv(bb.stubs[ires0])
    if ires1[0] == -1:
        stub1 = np.eye(4).reshape(1, 4, 4)
    else:
        stub1 = bb.stubs[ires1]
    assert _check_inorder(ires0)
    assert _check_inorder(ires1)
    # broadcast to the full (n_in, n_out) cross product of entry/exit pairs
    stub0inv, stub1 = np.broadcast_arrays(stub0inv[:, None], stub1)
    ires = np.stack(np.broadcast_arrays(ires0[:, None], ires1), axis=-1)
    isite = np.stack(np.broadcast_arrays(isite0[:, None], isite1), axis=-1)
    chain = np.stack(np.broadcast_arrays(chain0[:, None], chain1), axis=-1)
    x2exit = stub0inv @ stub1
    x2orig = stub0inv
    # assert is_homog_xform(x2exit)  # this could be slowish
    # assert is_homog_xform(x2orig)
    # min chain len, not same site
    not_same_chain = chain[..., 0] != chain[..., 1]
    not_same_site = isite[..., 0] != isite[..., 1]
    seqsep = np.abs(ires[..., 0] - ires[..., 1])
    # remove invalid in/out pairs (+ is or, * is and)
    valid = not_same_site
    valid *= not_same_chain + (seqsep >= min_seg_len)
    valid = valid.reshape(-1)
    if np.sum(valid) == 0:
        return None
    return (
        x2exit.reshape(-1, 4, 4)[valid],
        x2orig.reshape(-1, 4, 4)[valid],
        ires.reshape(-1, 2)[valid].astype("i4"),
        isite.reshape(-1, 2)[valid].astype("i4"),
        chain.reshape(-1, 2)[valid].astype("i4"),
        np.repeat(bbid, np.sum(valid)).astype("i4"),
    )