import os, sys
sys.path.append('../../affinityDB/db_libs')
import database, sqlite3
import imp

db_dir = os.path.join(os.path.abspath(os.getcwd()), 'database')
db_path = os.path.join(db_dir, 'database.db')
data_dir = os.path.join(db_dir, 'data')
if not os.path.exists(data_dir):
    os.makedirs(data_dir)

afdb = database.AffinityDB(db_path)
database_master = database.DatabaseMaster(db_path)

"""Download PDB files from the target list --------------------------------"""
with open('../VDS1/data/main_pdb_target_list.txt') as f:
    pdb_list = f.readline().strip().split(', ')
pdb_list = pdb_list[:2]
pdb_list = [str(p) for p in pdb_list]

fp, path, descr = imp.find_module('download_pdb_op')
lib_mod = imp.load_module('download_pdb_op', fp, path, descr)
lib_mod.Download_pdb_init(db_path=data_dir, download_dir='download')
afdb.run_multithread("download_pdb", arg_types=[str],
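# A minimal sketch of what the download_pdb_op module loaded above could look like.
# It assumes the convention used in these scripts: the *_init call stores module-level
# configuration, and the worker returns one output row per argument. The helper names,
# the _cfg dict, and the RCSB URL pattern are assumptions, not the library's confirmed API.
import os
import urllib

_cfg = {}

def Download_pdb_init(db_path, download_dir):
    # remember where downloaded structures should be written
    _cfg['out_dir'] = os.path.join(db_path, download_dir)
    if not os.path.exists(_cfg['out_dir']):
        os.makedirs(_cfg['out_dir'])

def download_pdb(pdb_id):
    # fetch a single structure from the RCSB and report its path back to the database
    dest = os.path.join(_cfg['out_dir'], pdb_id + '.pdb')
    urllib.urlretrieve('https://files.rcsb.org/download/' + pdb_id + '.pdb', dest)
    return [dest]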
print "sum test took: ", time.time( Test_multout_init() afdb.run_multithread("test_multout", arg_types=[int], arg_lists=[arg_ones], out_types=[int], out_names=['remainder'], num_threads=20,commit_sec=1) print "multout test took: ", time.time() - start, "seconds" my_db = database.DatabaseMaster(db_path) start = time.time() run_idx = my_db.retrieve("arg_001_test_multout", ["run_idx"], {"run_state":"{}==1"})[0] print "len run idx:", len(run_idx) out_idx = my_db.retrieve("out_001_test_multout", ["run_idx"], {"run_idx":"{}<100000"})[0] print "eln out idx", len(out_idx) idx,val,order = my_db.list_search(out_idx,run_idx) print idx
import sys, os, sqlite3, time
from glob import glob
from rdkit import Chem

sys.path.append('../../affinityDB')
import database
sys.path.append('../../affinityDB/dataset_libs')
import NEW

base_dir = '/home/cosmynx/Documents/database'
db_path = os.path.join(base_dir, 'labeled_pdb.db')
if os.path.isfile(db_path):
    os.system('rm ' + db_path)

afdb = database.AffinityDB(db_path)
db_editor = database.DatabaseMaster(db_path)

# out_db_path = os.path.join(base_dir, 'labeled_pdb_out.db')
# if os.path.isfile(out_db_path):
#     os.system('rm ' + out_db_path)
# afdb = database.AffinityDB(out_db_path)

"""Convert all the PDB files into mol files and generate ligand conformers-------"""
start = time.time()
ligand_files = glob(os.path.join(base_dir, 'labeled_pdb/crystal_ligands' + '/**/', '*[_]*.pdb'))[:]

NEW.GenerateConformersInit(base_dir=base_dir, num_conformers=100)
afdb.run_multithread(func="NEW.generate_conformers", arg_types=[str],
                     arg_lists=[ligand_files], out_types=[str, str, int],
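# A minimal RDKit-based sketch of what NEW.generate_conformers might do, assuming it
# returns (input ligand file, written conformer file, number of conformers) to match
# out_types=[str, str, int] above. The output filename and the _params dict are
# assumptions; the real dataset_libs implementation may differ.
import os
from rdkit import Chem
from rdkit.Chem import AllChem

_params = {}

def GenerateConformersInit(base_dir, num_conformers):
    _params['base_dir'] = base_dir
    _params['num_conformers'] = num_conformers

def generate_conformers(ligand_file):
    # read the crystal ligand, add hydrogens, and embed N 3D conformers
    mol = Chem.MolFromPDBFile(ligand_file, removeHs=False)
    mol = Chem.AddHs(mol)
    conf_ids = AllChem.EmbedMultipleConfs(mol, numConfs=_params['num_conformers'])
    # write all conformers to an SD file next to the input ligand
    out_file = os.path.splitext(ligand_file)[0] + '_confs.sdf'
    writer = Chem.SDWriter(out_file)
    for cid in conf_ids:
        writer.write(mol, confId=cid)
    writer.close()
    return [ligand_file, out_file, len(conf_ids)]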
import os, sys, time
import numpy as np

sys.path.append('../../affinityDB')
sys.path.append('../../affinityDB/lib_multithread')
import database, sqlite3
#import VDS1
from download_pdb_op import Download_pdb_init, download_pdb
from split_pdb_op import Split_pdb_init, split_pdb
from generate_conformers_op import Generate_conformers_init, generate_conformers
from pdb2mol_op import Pdb2mol_init, pdb2mol
from search_decoys_op import Search_decoys_init, search_decoys

db_root = "/home/maksym/Desktop/vds1/"
#os.remove(db_path)
afdb = database.AffinityDB(db_root, "test")
database_master = database.DatabaseMaster(os.path.join(db_root, "test.db"))

with open("./data/main_pdb_target_list.txt") as f:
    raw_pdb_list = f.readlines()
pdb_list = raw_pdb_list[0].split(", ")
print "number of pdbs to download:", len(pdb_list), "will download only 20"
pdb_id_set = [(unicode(pdb_name), ) for pdb_name in pdb_list[:20]]

# download 20 pdbs
Download_pdb_init(db_root=db_root, download_dir="download_pdbs1")
afdb.run_multithread("download_pdb", arg_sets=pdb_id_set)

# split 20 PDBs
disk_pdbs = database_master.retrieve("out_000_download_pdb", ["pdb_id", "pdb_file"], {})
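# A hedged sketch of how the retrieved pdb_id/pdb_file columns could feed the split
# step, assuming split_pdb takes per-structure argument tuples the same way
# download_pdb took pdb ids, and that retrieve returns one list per requested column
# (as in the test script above). Split_pdb_init's keyword names and the directory
# name are assumptions, not the library's confirmed API.
Split_pdb_init(db_root=db_root, split_dir="split_pdbs1")
split_arg_sets = [(pdb_id, pdb_file) for pdb_id, pdb_file in zip(disk_pdbs[0], disk_pdbs[1])]
afdb.run_multithread("split_pdb", arg_sets=split_arg_sets)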