def calc_hotspot(path, prot_name, method, nrot=3000):
    """
    Run a Fragment Hotspot Maps calculation on an already-prepared protein.

    :param path: str, path to prepared protein
    :param prot_name: str, protein identifier (only used by the commented-out
        save-directory helper below)
    :param method: str, buriedness method forwarded to the Runner
    :param nrot: int, number of probe rotations (default 3000)
    :return: the hotspot result object produced by ``Runner.from_protein``
    """
    target = prepare_protein(path)

    runner = Runner()
    run_settings = runner.Settings()
    run_settings.nrotations = nrot
    run_settings.apolar_translation_threshold = 15
    run_settings.polar_translation_threshold = 15
    run_settings.sphere_maps = False

    result = runner.from_protein(protein=target,
                                 charged_probes=False,
                                 probe_size=7,
                                 buriedness_method=method,
                                 cavities=None,
                                 nprocesses=3,
                                 settings=run_settings)

    #out = make_savedir(prot_name)
    out = os.getcwd()

    # Write the maps (plus a pymol visualisation script) to the output dir.
    with hs_io.HotspotWriter(out,
                             visualisation="pymol",
                             grid_extension=".ccp4",
                             zip_results=False) as writer:
        writer.write(result)

    return result
def run_hotspot_calculation(self, method="ghecom", sphere_maps=False):
    """
    Run the hotspots calculation on the specified PDB structure.

    :param method: str, buriedness method forwarded to the Runner
        (default "ghecom")
    :param sphere_maps: bool, forwarded to ``Runner.Settings.sphere_maps``
    :return: None; results and SuperStar grids are written to ``self.out_dir``
    """
    runner = Runner()
    run_settings = runner.Settings()
    run_settings.nrotations = self.number_rotations
    run_settings.apolar_translation_threshold = 15
    run_settings.polar_translation_threshold = 15
    run_settings.sphere_maps = sphere_maps

    result = runner.from_protein(protein=self.prepare_protein(),
                                 charged_probes=self.charged,
                                 probe_size=7,
                                 buriedness_method=method,
                                 cavities=None,
                                 nprocesses=1,
                                 settings=run_settings)

    #self.out_dir = self.make_savedir()
    # Save and zip the SuperStar grids:
    self._save_superstar_grids(runner)

    # Save and zip the results:
    with hs_io.HotspotWriter(self.out_dir,
                             visualisation="pymol",
                             grid_extension=".ccp4",
                             zip_results=True) as writer:
        writer.write(result)
def __init__(self):
    """
    Parse command-line arguments, load (or compute) the hotspot result for
    the target PDB, build the search ligand, and run the docking workflow,
    optionally guided and rescored using the hotspot maps.
    """
    # NOTE(review): `super(self.__class__, ...)` breaks under subclassing
    # (infinite recursion); prefer an explicit class name. Left unchanged
    # because the class definition is not visible here.
    super(self.__class__, self).__init__(description=__doc__)
    # handle command line arguments
    self.add_argument(
        'path',
        help='path to working directory'
    )
    self.add_argument(
        'pdb',
        help='PDB code for target'
    )
    self.add_argument(
        'chemical_id',
        # BUG FIX: help text was a copy-paste of the `pdb` argument's help
        # ("PDB code for target"); this argument is the ligand's chemical
        # component identifier.
        help='chemical component ID of the ligand to dock'
    )
    self.add_argument(
        '-hs', '--hotspot_guided',
        default=True,
        help='Use Hotspot insights to guide docking'
    )
    self.args = self.parse_args()
    # NOTE(review): argparse delivers command-line values as *strings*, so
    # the `is True` checks below only succeed when the default is used;
    # consider action='store_true' or a type= converter if this flag must
    # be settable from the command line -- TODO confirm intended usage.

    # create temp for output files
    self.temp = tempfile.mkdtemp()

    # calculate hotspot using Hotspots API
    if self.args.hotspot_guided is True:
        try:
            # Reuse a previously computed result if one exists on disk.
            self.hr = hs_io.HotspotReader(path=os.path.join(self.args.path, "out.zip")).read()
        except IOError:
            h = calculation.Runner()
            settings = h.Settings()
            settings.nrotations = 3000
            settings.sphere_maps = True
            self.hr = h.from_pdb(pdb_code=self.args.pdb,
                                 charged_probes=True,
                                 buriedness_method='ghecom',
                                 nprocesses=5,
                                 settings=settings)
            # Cache the freshly computed result for subsequent runs.
            with hs_io.HotspotWriter(path=os.path.join(self.args.path), zip_results=True) as hw:
                hw.write(self.hr)

    # generate molecule for docking
    self.search_ligands = os.path.join(self.temp, self.args.chemical_id + ".mol2")
    self.ligand = self.from_smiles(smiles=_Ligand.from_chemicalid(chemicalid=self.args.chemical_id).smiles,
                                   path=self.search_ligands,
                                   identifier=self.args.chemical_id)

    # dock search ligands into hotspot protein
    self.docked_ligands = self.dock()

    if self.args.hotspot_guided is True:
        self.rescored_ligands = self.rescore()
def run_hotspot_calculation(self, method="ghecom"):
    """
    Runs the hotspots calculation on the specified PDB structure.

    If a ``superstar_grids.zip`` archive already exists next to
    ``self.out_dir``, the (expensive) SuperStar/Ghecom stage is skipped and
    the calculation resumes from the archived grids; otherwise a full run
    from the protein is performed.

    :param method: str, buriedness method used for a full run
        (default "ghecom")
    :return: Path to the zipped results archive (``out.zip``)
    """
    h = Runner()
    settings = h.Settings()
    settings.nrotations = self.number_rotations
    settings.apolar_translation_threshold = 15
    settings.polar_translation_threshold = 15
    settings.sphere_maps = self.spheres

    # Check if SuperStar and Ghecom have already been run.
    super_archive_path = Path(self.out_dir.parent, "superstar_grids.zip")

    if super_archive_path.exists():
        # Unpack the archived grids into a temp dir alongside the archive.
        super_tmp_path = Path(self.out_dir.parent, super_archive_path.stem)
        if not super_tmp_path.exists():
            super_tmp_path.mkdir()
        unpack_archive(super_archive_path, super_tmp_path, 'zip')
        # Buriedness grid is stored as 'buriedness.ccp4' inside the archive.
        b_grid = Grid.from_file(
            str(Path(super_tmp_path, 'buriedness.ccp4').resolve()))
        # Resume the calculation from the pre-computed SuperStar grids.
        result = h.from_superstar(
            protein=self.prepare_protein(),
            superstar_grids=self.create_atomic_hotspots(super_tmp_path),
            buriedness=b_grid,
            charged_probes=self.charged,
            settings=settings,
            clear_tmp=True)
        # Remove the unpacked copy; the zip archive is kept.
        rmtree(super_tmp_path)
    else:
        # Full calculation from scratch (runs SuperStar/Ghecom internally).
        result = h.from_protein(protein=self.prepare_protein(),
                                charged_probes=self.charged,
                                probe_size=7,
                                buriedness_method=method,
                                cavities=None,
                                nprocesses=1,
                                settings=settings)
        # Save and zip the SuperStar Grids:
        self._save_superstar_grids(h)

    # Save and zip the Results
    with hs_io.HotspotWriter(str(self.out_dir.resolve()),
                             visualisation="pymol",
                             grid_extension=".ccp4",
                             zip_results=True) as writer:
        writer.write(result)

    print(f"out_file: {str(Path(self.out_dir, 'out.zip').resolve())}")

    return Path(self.out_dir, 'out.zip')
def run_hotspot_calculation(self, nrot=100000, method="ghecom", charged=True, sphere_maps=False, save_ligand=True):
    """
    Run the hotspots calculation on the specified PDB structure.

    :param nrot: int, number of probe rotations (default 100000)
    :param method: str, buriedness method forwarded to the Runner
    :param charged: bool, whether to use charged probes
    :param sphere_maps: bool, forwarded to ``Runner.Settings.sphere_maps``
    :param save_ligand: bool, extract and save bound ligands before the run
    :return: None; results and SuperStar grids are written to ``self.out_dir``
    """
    # Lazily create the output directory and locate/prepare the protein.
    if not self.out_dir:
        self.out_dir = self.make_savedir()

    if self.protein_path:
        target = Protein.from_file(self.protein_path)
    else:
        self.protein_path = self.find_protein()
        target = self.prepare_protein()

    if save_ligand:
        self.extract_ligands()

    # log the run parameters
    self.log_runner(nrot)

    runner = Runner()
    run_settings = runner.Settings()
    run_settings.nrotations = nrot
    run_settings.apolar_translation_threshold = 15
    run_settings.polar_translation_threshold = 15
    run_settings.sphere_maps = sphere_maps

    result = runner.from_protein(protein=target,
                                 charged_probes=charged,
                                 probe_size=7,
                                 buriedness_method=method,
                                 cavities=None,
                                 nprocesses=5,
                                 settings=run_settings)

    #self.out_dir = self.make_savedir()
    # Save and zip the SuperStar grids:
    self._save_superstar_grids(runner)

    # Save and zip the results:
    with hs_io.HotspotWriter(self.out_dir,
                             visualisation="pymol",
                             grid_extension=".ccp4",
                             zip_results=True) as writer:
        writer.write(result)
print target for pdb in pdbs: chain = chains[pdb] ligand_id = ligands[pdb] out_dir = os.path.join(base, target, pdb, "reference") if not os.path.exists(out_dir): os.mkdir(out_dir) try: p = PharmacophoreModel._from_siena(pdb, ligand_id, mode, target, out_dir=out_dir) p.write(os.path.join(out_dir, "reference_pharmacophore.py")) prot = hs_io.HotspotReader( os.path.join(base, target, pdb, "out.zip")).read().protein hs = Results(protein=prot, super_grids=p.dic) with hs_io.HotspotWriter(out_dir) as wf: wf.write(hs) with io.MoleculeWriter(os.path.join(out_dir, "aligned.mol2")) as w: for l in p.representatives: w.write(l) except RuntimeError: print "skipped {}".format(target)
def dock(self):
    """
    Setup and execution of docking run with GOLD.

    NB: Docking Settings class is imported from the Hotspots API rather than
    Docking API. This is essential for running hotspot guided docking.

    :return: a :class:`ccdc.io.MoleculeReader`
    """
    docker = Docker()
    docker.settings = hs_docking.DockerSettings()

    # download protein
    PDBResult(self.args.pdb).download(self.temp)
    protein = Protein.from_file(
        os.path.join(self.temp, self.args.pdb + ".pdb"))
    protein.remove_all_waters()
    protein.remove_all_metals()
    protein.add_hydrogens()
    for l in protein.ligands:
        protein.remove_ligand(l.identifier)

    f = os.path.join(self.temp, self.args.pdb + ".mol2")
    with MoleculeWriter(f) as w:
        w.write(protein)

    # setup
    docker.settings.add_protein_file(f)

    # create binding site from list of residues
    cavs = Cavity.from_pdb_file(
        os.path.join(self.temp, self.args.pdb + ".pdb"))
    cavs[0].to_pymol_file("test.py")

    # Map feature count -> residue list per cavity; the cavity with the most
    # residue features is selected as the binding site.
    c = {}
    for i, cav in enumerate(cavs):
        cav.feats = []
        # BUG FIX: loop variable renamed from `f` (it shadowed the mol2 path
        # above) and the bare `except:` -- which also swallowed
        # KeyboardInterrupt/SystemExit -- narrowed to AttributeError, the
        # expected failure when a feature has no residue (matches the
        # commented-out comprehension's intent).
        for feat in cav.features:
            try:
                cav.feats.append(feat.residue)
            except AttributeError:
                continue
        # cav.feats = [f.residue for f in cav.features]
        cav.len = len(cav.feats)
        c.update({cav.len: cav.feats})
        cav.to_pymol_file("{}.py".format(i))

    # NOTE(review): cavities with equal feature counts collide in `c`
    # (last one wins) -- behavior kept as in the original.
    selected_cavity = max(c.keys())

    docker.settings.binding_site = docker.settings.BindingSiteFromListOfResidues(
        protein=docker.settings.proteins[0],
        residues=c[selected_cavity])
    docker.settings.fitness_function = 'plp'
    docker.settings.autoscale = 100.
    docker.settings.output_directory = self.temp
    docker.settings.output_file = "docked_ligands.mol2"
    docker.settings.add_ligand_file(self.search_ligands, ndocks=25)

    # constraints
    if self.args.hotspot_guided is True:
        e_settings = result.Extractor.Settings()
        e_settings.mvon = True
        extractor = result.Extractor(self.hr, settings=e_settings)

        # Extract the single best 300 A^3 hotspot volume and save it.
        bv = extractor.extract_best_volume(volume=300)[0]
        f = hs_utilities.Helper.get_out_dir(
            os.path.join(self.args.path, "best_volume"))

        with hs_io.HotspotWriter(path=f) as hw:
            hw.write(bv)

        constraints = docker.settings.HotspotHBondConstraint.create(
            protein=docker.settings.proteins[0],
            hr=bv,
            weight=5,
            min_hbond_score=0.2,
            max_constraints=5)

        for constraint in constraints:
            docker.settings.add_constraint(constraint)
        docker.settings.generate_fitting_points(hr=bv)

        # Write the constrained atoms out as dummy atoms for visualisation.
        mol = Molecule(identifier="constraints")
        for constraint in constraints:
            for a in constraint.atoms:
                mol.add_atom(
                    Atom(atomic_symbol="C",
                         atomic_number=14,
                         label="Du",
                         coordinates=a.coordinates))

        # NOTE(review): "constaints.mol2" is misspelled; kept as-is since
        # downstream tooling may expect this exact filename.
        with MoleculeWriter(os.path.join(self.args.path,
                                         "constaints.mol2")) as w:
            w.write(mol)

    docker.dock()
    results = docker.Results(docker.settings)
    return results.ligands
# Build ensemble hotspot maps for two proteins from their per-structure
# "out.zip" hotspot results found under the current working directory.
# NOTE(review): `prot1_name` and `make_savedir` are defined earlier in the
# file (outside this fragment) -- presumably the sibling bromodomain name
# and a save-directory helper; confirm against the full script.
prot2_name = "BAZ2B"

prot1_paths = glob(join(os.getcwd(), "{}*".format(prot1_name), "out.zip"))
print(prot1_paths)
prot2_paths = glob(join(os.getcwd(), "{}*".format(prot2_name), "out.zip"))
print(prot2_paths)

# Read every per-structure hotspot result for each protein.
prot1_res_list = [hs_io.HotspotReader(p).read() for p in prot1_paths]
prot2_res_list = [hs_io.HotspotReader(p).read() for p in prot2_paths]

# Calculate ensemble hotspots for the two proteins
ensemble_1 = Results.from_grid_ensembles(prot1_res_list, prot1_name)

#Save ensemble:
out1 = make_savedir(prot1_name, "ensemble")
with hs_io.HotspotWriter(out1, visualisation="pymol",
                         grid_extension=".ccp4",
                         zip_results=True) as writer:
    writer.write(ensemble_1)

# Free the per-structure results before building the second ensemble to
# keep peak memory down.
del (prot1_res_list)

ensemble_2 = Results.from_grid_ensembles(prot2_res_list, prot2_name)

#Save ensemble:
out2 = make_savedir(prot2_name, "ensemble")
with hs_io.HotspotWriter(out2, visualisation="pymol",
                         grid_extension=".ccp4",
                         zip_results=True) as writer:
    writer.write(ensemble_2)

del (prot2_res_list)

# Get difference maps