def one_slurm(list_smiles, server, unique_id, name, target='drd3', parallel=True, exhaustiveness=16, mean=False, load=False):
    """Dock a batch of SMILES strings and dump scores incrementally to a CSV.

    Results are written to ``results/<name>/docking_small_results/<unique_id>.csv``
    under ``script_dir``, one ``(smile, score)`` row per molecule. The file is
    re-opened in append mode after every docking call so partial results survive
    a crashed or killed SLURM job.

    :param list_smiles: iterable of SMILES strings to dock
    :param server: server identifier passed to ``set_path`` to locate pythonsh/vina
    :param unique_id: job identifier; used both as the CSV filename stem and as
        the per-call id handed to ``dock``
    :param name: experiment name (subdirectory under ``results/``)
    :param target: docking target forwarded to ``dock`` (default ``'drd3'``)
    :param parallel: whether ``dock`` may run in parallel
    :param exhaustiveness: Vina exhaustiveness setting
    :param mean: whether ``dock`` returns the mean score
    :param load: forwarded to ``dock`` (load precomputed data)
    :return: None (results are written to disk)
    """
    pythonsh, vina = set_path(server)
    dirname = os.path.join(script_dir, 'results', name, 'docking_small_results')
    dump_path = os.path.join(dirname, f"{unique_id}.csv")

    header = ['smile', 'score']
    with open(dump_path, 'w', newline='') as csvfile:
        csv.writer(csvfile).writerow(header)

    for smile in list_smiles:
        # BUG FIX: previously hard-coded target='drd3', ignoring the `target`
        # parameter; now the caller's choice of target is honored.
        score_smile = dock(smile, target=target, unique_id=unique_id, parallel=parallel,
                           exhaustiveness=exhaustiveness, mean=mean,
                           pythonsh=pythonsh, vina=vina, load=load)
        # Re-open in append mode each iteration so every score is flushed to
        # disk immediately (robust to job preemption).
        with open(dump_path, 'a', newline='') as csvfile:
            csv.writer(csvfile).writerow([smile, score_smile])
def one_dock(smile, server, parallel=False, exhaustiveness=16, mean=False, load=False, target='drd3'):
    """Dock a single SMILES string and return its docking score.

    Resolves the pythonsh/vina executables for ``server`` via ``set_path``,
    then delegates to ``dock``, using the SMILES itself as the unique id.

    :param smile: SMILES string of the molecule to dock
    :param server: server identifier used to locate pythonsh/vina
    :param parallel: whether ``dock`` may run in parallel
    :param exhaustiveness: Vina exhaustiveness setting
    :param mean: whether ``dock`` returns the mean score
    :param load: forwarded to ``dock`` (load precomputed data)
    :param target: docking target (default ``'drd3'``)
    :return: the docking score produced by ``dock``
    """
    pythonsh, vina = set_path(server)
    return dock(smile,
                unique_id=smile,
                target=target,
                parallel=parallel,
                exhaustiveness=exhaustiveness,
                mean=mean,
                pythonsh=pythonsh,
                vina=vina,
                load=load)
# We load the data if args.obj != 'docking': X = np.loadtxt( '../../data/latent_features_and_targets/latent_features.txt') y = -np.loadtxt( f'../../data/latent_features_and_targets/targets_{args.obj}.txt') X = X[:args.n_init, ] y = y[:args.n_init] else: X = np.loadtxt( '../../data/latent_features_and_targets/latent_features_docking.txt') # We want to minimize docking scores => no need to take (-scores) y = -np.loadtxt( f'../../data/latent_features_and_targets/targets_{args.obj}.txt') PYTHONSH, VINA = set_path(args.server) with open('250k_docking_scores.pickle', 'rb') as f: docked = pickle.load(f) def dock_one(enum_tuple): """ Docks one smiles. Input = tuple from enumerate iterator""" identifier, smiles = enum_tuple if smiles in docked: return docked[smiles] else: return dock(smiles, identifier, PYTHONSH, VINA, parallel=False,