def explore_network(self):
    """Walk every node of the reaction network and annotate it with energy scores.

    For each node, every reactant set (predecessor combination) is scored once
    per unique structure pattern; nodes that repeat an already-scored pattern
    inherit the cached score.  Node attributes written:
      - 'first': True when this node's score was freshly computed (or the node
        is a monomer), False when the score is inherited from a previously
        scored node with the same pattern.
      - 'score': the post-binding (bound) score of the node's structure.
    Edge attribute written:
      - 'rxn_score': bound score minus pre-bound score for freshly scored
        reactions; 0 for inherited ones (r_score == pr_score in that branch).
    Finally marks the network as energy-annotated via net.is_energy_set.
    """
    processed = set()
    for node_id in self.net.network.nodes():
        # Canonical pattern key: character-sorted structure string.
        # NOTE(review): self.written lookups below assume score_reaction
        # caches entries under this same character-sorted key — confirm
        # (score_reaction sorts the *list* of names, which can differ).
        name = gtostr(self.net.network.nodes[node_id]['struct'])
        name = ''.join(sorted(name))
        if node_id < self.net.num_monomers:
            # Monomers are always "first": their scores are never inherited.
            self.net.network.nodes[node_id]['first'] = True
        else:
            self.net.network.nodes[node_id]['first'] = False
        for predecessors in self.net.get_reactant_sets(node_id):
            if name not in processed:
                # This pattern has not yet been processed: score it for real.
                r_score, pr_score = self.score_reaction(predecessors)
                # Attribute to tell whether score is inherited from a previous
                # node with this pattern.
                self.net.network.nodes[node_id]['first'] = True
                processed.add(name)
            else:
                # We write the score to every node and use the "first"
                # attribute to tell whether it is energetically meaningful;
                # inherited scores reuse the cached bound score.
                r_score = pr_score = self.written[name][1]
            # Add score attribute (last reactant set wins if several exist).
            self.net.network.nodes[node_id]['score'] = pr_score
            for n in predecessors:
                self.net.network.edges[(n, node_id)]['rxn_score'] = pr_score - r_score
    self.net.is_energy_set = True
def score_reaction(self, reactant_ids: Union[list, set]):
    """Score the binding reaction that joins the given reactant nodes.

    Concatenates the reactants' cleaned PDB files into a single complex PDB,
    scores the combined pose, and caches (pose, score) in ``self.written``
    under the product's canonical name.

    Args:
        reactant_ids: node ids of the reactants participating in the reaction.

    Returns:
        Tuple ``(bound_score, prebound_score)`` — the score of the combined
        complex and the sum of the reactants' cached individual scores.
    """
    # Canonical per-reactant names: character-sorted structure strings,
    # matching the key convention used in explore_network.
    names = [
        ''.join(sorted(gtostr(self.net.network.nodes[rid]['struct'])))
        for rid in reactant_ids
    ]
    # Energy of the separated reactants (scores were cached previously).
    prebound_score = sum(self.written[n][1] for n in names)
    # Concatenate the reactant PDB files into one complex PDB.
    pdb_chunks = []
    for name in names:
        reactant_file = os.path.join(self.sub_dir, name + '.clean.pdb')
        with open(reactant_file, 'r') as f:
            pdb_chunks.append(f.read())
    new_pdb_str = ''.join(pdb_chunks)
    # BUG FIX: the product key must be the *character*-sorted concatenation of
    # the reactant names, matching the node-name canonicalization in
    # explore_network.  The old ``''.join(sorted(names))`` sorted the list of
    # name strings, which diverges whenever multi-character names interleave
    # (e.g. ['AC', 'B'] -> 'ACB' instead of 'ABC'), so later cache lookups and
    # reactant-file reads could miss.
    new_pdb_name = ''.join(sorted(''.join(names)))
    new_pdb_path = os.path.join(self.sub_dir, new_pdb_name + '.clean.pdb')
    with open(new_pdb_path, 'w') as f:
        f.write(new_pdb_str)
    new_pose, bound_score = self._pose_from_pdb(new_pdb_path)
    # Cache so repeated patterns and downstream reactions reuse this result.
    self.written[new_pdb_name] = (new_pose, bound_score)
    return bound_score, prebound_score
def __init__(self, net: ReactionNetwork, subunit_dir: str):
    """Initialize the scorer: boot Rosetta, index monomer PDBs, and seed
    monomer scores onto the network.

    Args:
        net: the reaction network whose nodes will be scored.
        subunit_dir: directory holding cleaned subunit PDBs, with raw monomer
            PDBs in its 'monomers' subdirectory.
    """
    rosetta_init()
    self.net = net
    # BUG FIX: the listed files live in the 'monomers' subdirectory, so join
    # with that subdirectory — the old code listed 'monomers' but joined the
    # bare subunit_dir, yielding paths that do not exist.
    monomers_dir = os.path.join(subunit_dir, 'monomers')
    self.monomer_pdb = [
        os.path.join(monomers_dir, subunit)
        for subunit in os.listdir(monomers_dir)
    ]
    self.sub_dir = subunit_dir
    self.written = dict()
    # BUG FIX: call the factory — the old code stored the get_fa_scorefxn
    # function object itself instead of a score-function instance (the other
    # __init__ variant in this file calls it).
    # Note this score function is only guaranteed consistent in this instance!
    self.scorefxn = get_fa_scorefxn()
    self._preprocess()
    # Cache (pose, score) for every cleaned PDB already present on disk.
    for file in os.listdir(subunit_dir):
        if ".clean.pdb" in file:
            m_pose = os.path.join(subunit_dir, file)
            self.written[strip_pdb_ext(file)] = self._pose_from_pdb(m_pose)
    # Seed each monomer node's score from the cached results.
    for i in range(self.net.num_monomers):
        self.net.network.nodes[i]['score'] = self.written[gtostr(
            self.net.network.nodes[i]['struct'])][1]
def __init__(self, net: ReactionNetwork, subunit_dir: str):
    """Initialize the scorer: boot Rosetta, set up the relaxer, and score all
    cleaned subunit PDBs (in parallel when the Rosetta build supports it).

    Args:
        net: the reaction network whose nodes will be scored.
        subunit_dir: directory holding cleaned subunit PDBs, with raw monomer
            PDBs in its 'monomers' subdirectory.
    """
    rosetta_init()
    self.net = net
    monomers_dir = os.path.join(subunit_dir, 'monomers')
    self.monomer_pdb = [
        os.path.abspath(os.path.join(monomers_dir, subunit))
        for subunit in os.listdir(monomers_dir)
    ]
    self.sub_dir = os.path.abspath(subunit_dir)
    self.written = dict()
    # Note this score function is only guaranteed consistent in this instance!
    self.scorefxn = get_fa_scorefxn()
    self.relaxer = relax.FastRelax()
    self.relaxer.set_scorefxn(self.scorefxn)
    self.relaxer.max_iter(200)
    self._preprocess()
    self._load_existing()
    # BUG FIX: filter directly in the comprehension.  The old code inserted ''
    # placeholders for non-matching files and then called set.remove(''),
    # which raises KeyError when *every* file matches the filter.  sorted()
    # also makes the processing order deterministic.
    subunit_files = sorted({
        os.path.join(self.sub_dir, file)
        for file in os.listdir(subunit_dir)
        if '.clean' in file and '.relaxed' not in file
    })
    # BUG FIX: guard the empty case — Pool(0) raises ValueError and the old
    # code would misleadingly print the "no serialization" warning.
    if subunit_files:
        try:
            with Pool(len(subunit_files)) as p:
                results = p.map(self._pose_from_pdb, subunit_files)
        except Exception:
            # BUG FIX: route the warning to stderr via file= — the old code
            # passed sys.stderr as a positional argument, so the message (and
            # the stream object's repr) went to stdout instead.
            print(
                "Lacking serializable rosetta build. Parallel processing disabled. "
                "Recommend compiling from source with --serialization flag \n "
                "Continuing to process sequentially", file=sys.stderr)
            results = [self._pose_from_pdb(sunit) for sunit in subunit_files]
    else:
        results = []
    # Cache (pose, score) keyed by the bare file name without PDB extensions.
    for i, res in enumerate(results):
        self.written[strip_pdb_ext(os.path.split(subunit_files[i])[1])] = res
    # Seed each monomer node's score from the cached results.
    for i in range(self.net.num_monomers):
        self.net.network.nodes[i]['score'] = self.written[gtostr(
            self.net.network.nodes[i]['struct'])][1]