def calculate_E_no_bias(E_no_bias_all, qtanh_all, umb_params, N_k_bias):
    """Accumulate unbiased energies for the umbrella window in the CWD.

    Reads the umbrella parameters from ``umbrella_params``, computes (or
    loads from the ``qtanh.npy`` cache) the tanh contact-sum reaction
    coordinate of ``traj.xtc``, subtracts the harmonic umbrella bias from
    the potential energy in ``Epot.dat``, and appends the results to the
    caller-supplied lists (the function mutates its arguments in place).

    Parameters
    ----------
    E_no_bias_all : list
        Appended with the unbiased potential-energy array for this window.
    qtanh_all : list
        Appended with the reaction-coordinate timeseries for this window.
    umb_params : list
        Appended with ``[qtanh_center, kumb, n_frames_toss]``.
    N_k_bias : list
        Appended with the number of frames kept for this window.
    """
    with open("umbrella_params", "r") as fin:
        params = fin.readline().split()
    qtanh_center = float(params[0])
    kumb = float(params[1])
    gamma = float(params[2])
    # BUGFIX: use floor division so n_frames_toss stays an int under
    # Python 3; a float here breaks the slice indexing below.
    n_frames_toss = int(params[4]) // 1000
    umb_params.append([qtanh_center, kumb, n_frames_toss])

    if not os.path.exists("qtanh.npy"):
        # Contact pairs (1-indexed in the file) and reference distances
        # start on the second line of umbrella_params.
        pairs = np.loadtxt("umbrella_params", usecols=(0, 1), dtype=int,
                           skiprows=1) - 1
        r0 = np.loadtxt("umbrella_params", usecols=(2,), skiprows=1)
        widths = (2. / gamma) * np.ones(len(pairs))
        qtanhsum_obs = observables.TanhContactSum("conf.gro", pairs,
                                                  1.2 * r0, widths)
        # Discard the first n_frames_toss equilibration frames before caching.
        qtanh_temp = observables.calculate_observable(
            ["traj.xtc"], qtanhsum_obs)[0][n_frames_toss:]
        np.save("qtanh.npy", qtanh_temp)
    else:
        # The cached coordinate already has the equilibration frames removed.
        qtanh_temp = np.load("qtanh.npy")

    # Remove the harmonic umbrella bias from the raw potential energy.
    Ebias = 0.5 * kumb * ((qtanh_temp - qtanh_center)**2)
    Epot = np.loadtxt("Epot.dat", usecols=(1,))[n_frames_toss:]
    E_no_bias_all.append(Epot - Ebias)
    qtanh_all.append(qtanh_temp)
    N_k_bias.append(qtanh_temp.shape[0])
def calculate_E_no_bias(E_no_bias_all, qtanh_all, umb_params, N_k_bias):
    """Accumulate unbiased energies for the umbrella window in the CWD.

    Reads the umbrella parameters from ``umbrella_params``, computes (or
    loads from the ``qtanh.npy`` cache) the tanh contact-sum reaction
    coordinate of ``traj.xtc``, subtracts the harmonic umbrella bias from
    the potential energy in ``Epot.dat``, and appends the results to the
    caller-supplied lists (the function mutates its arguments in place).

    Parameters
    ----------
    E_no_bias_all : list
        Appended with the unbiased potential-energy array for this window.
    qtanh_all : list
        Appended with the reaction-coordinate timeseries for this window.
    umb_params : list
        Appended with ``[qtanh_center, kumb, n_frames_toss]``.
    N_k_bias : list
        Appended with the number of frames kept for this window.
    """
    with open("umbrella_params", "r") as fin:
        params = fin.readline().split()
    qtanh_center = float(params[0])
    kumb = float(params[1])
    gamma = float(params[2])
    # BUGFIX: floor division keeps n_frames_toss an int under Python 3;
    # true division produced a float that breaks the slices below.
    n_frames_toss = int(params[4]) // 1000
    umb_params.append([qtanh_center, kumb, n_frames_toss])

    if not os.path.exists("qtanh.npy"):
        # Pair list (1-indexed in the file) and reference distances start
        # on the second line of umbrella_params.
        pairs = np.loadtxt(
            "umbrella_params", usecols=(0, 1), dtype=int, skiprows=1) - 1
        r0 = np.loadtxt("umbrella_params", usecols=(2,), skiprows=1)
        widths = (2. / gamma) * np.ones(len(pairs))
        qtanhsum_obs = observables.TanhContactSum("conf.gro", pairs,
                                                  1.2 * r0, widths)
        # Discard the first n_frames_toss equilibration frames before caching.
        qtanh_temp = observables.calculate_observable(
            ["traj.xtc"], qtanhsum_obs)[0][n_frames_toss:]
        np.save("qtanh.npy", qtanh_temp)
    else:
        # The cache already has the equilibration frames removed.
        qtanh_temp = np.load("qtanh.npy")

    # Subtract the harmonic umbrella bias from the raw potential energy.
    Ebias = 0.5 * kumb * ((qtanh_temp - qtanh_center)**2)
    Epot = np.loadtxt("Epot.dat", usecols=(1,))[n_frames_toss:]
    E_no_bias_all.append(Epot - Ebias)
    qtanh_all.append(qtanh_temp)
    N_k_bias.append(qtanh_temp.shape[0])
# Plot the (unnormalized) PMF along the qtanh coordinate for this window.
# NOTE(review): removed two dead statements from the original —
# "traj = md.load(...)" and "qtanh = md.compute_distances(...)" — whose
# results were unconditionally overwritten by the cache logic below.
with open("umbrella_params", "r") as fin:
    params = fin.readline().split()
q0 = float(params[0])
kumb = float(params[1])
gamma = float(params[2])
# Floor division keeps n_frames_toss an int under Python 3.
n_frames_toss = int(params[4]) // 1000

# BUGFIX: the original had a dangling "else:"; the cache-check guard was
# missing.  Reconstructed to match the identical logic in
# calculate_E_no_bias elsewhere in this file.
if not os.path.exists("qtanh.npy"):
    pairs = np.loadtxt("umbrella_params", usecols=(0, 1), dtype=int,
                       skiprows=1) - 1
    r0 = np.loadtxt("umbrella_params", usecols=(2,), skiprows=1)
    widths = (2. / gamma) * np.ones(len(pairs))
    qtanhsum_obs = observables.TanhContactSum("conf.gro", pairs,
                                              1.2 * r0, widths)
    qtanh = observables.calculate_observable(["traj.xtc"], qtanhsum_obs)
    np.save("qtanh.npy", qtanh)
else:
    qtanh = np.load("qtanh.npy")

# PMF = -ln(counts), shifted so the minimum is zero.
# NOTE(review): empty bins (n == 0) produce inf/RuntimeWarning here —
# presumably acceptable for these data; confirm if it matters.
n, bins = np.histogram(qtanh, bins=n_bins)
mid_bin = 0.5 * (bins[1:] + bins[:-1])
pmf = -np.log(n)
pmf -= pmf.min()
plt.plot(mid_bin, pmf, label="$Q_0 = {}$".format(Q0[i]))
os.chdir("..")
plt.xlim(0, 150)
# Bin-average the non-native contact observable A as a function of q.
# Non-native ("nn") pair parameters: rows interleave, native entries occupy
# the first 2*n_native_pairs + 1 rows — presumably the pairwise_params file
# layout; TODO confirm against the file writer.
nn_pairs = np.loadtxt("%s/pairwise_params" % dir, usecols=(0,1))[2*n_native_pairs + 1::2] - 1
nn_r0 = np.loadtxt("%s/pairwise_params" % dir, usecols=(4,))[2*n_native_pairs + 1::2]
# Contact-function reference distance is shifted out by 0.1 (units not
# visible in this chunk; presumably nm).
nn_r0_cont = nn_r0 + 0.1
widths = 0.05
top = "%s/Native.pdb" % dir
# Reuse cached per-trajectory observables only when every trajectory
# directory already has BOTH .dat files; otherwise recompute and save.
# NOTE(review): "&" is a bitwise AND of two bools — works, but "and" would
# be conventional.  "dir" shadows the builtin; defined earlier in the file.
if all([ (os.path.exists("%s/Atanh_0_05.dat" % x.split("/")[0]) & os.path.exists("%s/Qtanh_0_05.dat" % x.split("/")[0])) \
        for x in trajfiles ]):
    qtanh = [ np.loadtxt("%s/Qtanh_0_05.dat" % x.split("/")[0]) for x in trajfiles ]
    Atanh = [ np.loadtxt("%s/Atanh_0_05.dat" % x.split("/")[0]) for x in trajfiles ]
else:
    # "pairs" and "r0_cont" are defined earlier in the file (not visible in
    # this chunk).
    qtanhsum_obs = observables.TanhContactSum(top, pairs, r0_cont, widths)
    qtanh = observables.calculate_observable(trajfiles, qtanhsum_obs, saveas="Qtanh_0_05.dat")
    Atanhsum_obs = observables.TanhContactSum(top, nn_pairs, nn_r0_cont, widths)
    Atanh = observables.calculate_observable(trajfiles, Atanhsum_obs, saveas="Atanh_0_05.dat")
# Pool all trajectories and histogram q into 40 bins.
q = np.concatenate(qtanh)
A = np.concatenate(Atanh)
n,bin_edges = np.histogram(q,bins=40)
mid_bin = 0.5*(bin_edges[1:] + bin_edges[:-1])
A_bin_avg = np.zeros(len(bin_edges) - 1,float)
dA2_bin_avg = np.zeros(len(bin_edges) - 1,float)
for i in range(len(bin_edges) - 1):
    # Boolean mask of frames whose q falls in bin i (left-open, right-closed).
    frames_in_this_bin = ((q > bin_edges[i]) & (q <= bin_edges[i+1]))
    if any(frames_in_this_bin):
        # Mean of A over the frames in this bin; empty bins stay 0.
        # dA2_bin_avg is declared above but never filled in this chunk —
        # presumably computed just below; the loop body appears truncated
        # here.
        A_bin_avg[i] = np.mean(A[frames_in_this_bin])
import simulation.calc.observables as observables

if __name__ == "__main__":
    # Compute the probability density of the end-to-end (residue 1 to N)
    # distance over unfolded frames and save it to r1N_distribution/.
    # FIX: close the trajectory-list file deterministically with a context
    # manager (the original leaked the handle) and rename the local "dir",
    # which shadowed the builtin.
    with open("ticatrajs", "r") as fin:
        trajfiles = [ x.rstrip("\n") for x in fin.readlines() ]
    traj_dir = os.path.dirname(trajfiles[0])

    # Native contact pairs are 1-indexed in the .ndx file; shift to 0-based.
    nat_pairs = np.loadtxt("%s/native_contacts.ndx" % traj_dir,
                           skiprows=1, dtype=int) - 1
    n_native_pairs = nat_pairs.shape[0]

    top = "%s/Native.pdb" % traj_dir
    # Residue count = line count minus one (presumably one atom per residue
    # plus an END record — TODO confirm for this coarse-grained model).
    with open(top, "r") as fin:
        n_residues = len(fin.readlines()) - 1

    # End-to-end distance observable between the first and last residues.
    end_pairs = np.array([[0, n_residues - 1]])
    r_obs = observables.Distances(top, end_pairs)
    r = np.concatenate(observables.calculate_observable(trajfiles, r_obs))

    # Get probability density of end-to-end distance of the unfolded state.
    qtanh = np.concatenate([ np.loadtxt("%s/Qtanh_0_05.dat" % os.path.dirname(x)) for x in trajfiles ])
    minima = np.loadtxt("Qtanh_0_05_profile/minima.dat")[0]
    # Frames with qtanh below the unfolded minimum plus a 10%-of-native
    # margin count as unfolded.
    U = minima + 0.1 * n_native_pairs

    n, bins = np.histogram(r[qtanh < U], bins=40, density=True)
    mid_bin = 0.5 * (bins[1:] + bins[:-1])

    if not os.path.exists("r1N_distribution"):
        os.mkdir("r1N_distribution")
    os.chdir("r1N_distribution")
    np.savetxt("r1N_vs_bin.dat", n)
    np.savetxt("mid_bin.dat", mid_bin)
# Bin-average the non-native pair energy E as a function of q.
# "r0", "pairs", "dir", "trajfiles", "n_native_pairs" and "nn_pair_params"
# are defined earlier in the file (not visible in this chunk); "dir"
# shadows the builtin.
# Contact-function reference distances shifted out by 0.1 (units not
# visible here; presumably nm).
r0_cont = r0 + 0.1
# Non-native ("nn") parameters: rows interleave, native entries occupy the
# first 2*n_native_pairs + 1 rows — presumably the pairwise_params layout;
# TODO confirm against the file writer.
nn_pair_type = np.loadtxt("%s/pairwise_params" % dir, usecols=(3,), dtype=int)[2*n_native_pairs + 1::2]
nn_pairs = np.loadtxt("%s/pairwise_params" % dir, usecols=(0,1))[2*n_native_pairs + 1::2] - 1
nn_eps = np.loadtxt("%s/model_params" % dir)[2*n_native_pairs + 1::2]
nn_r0 = np.loadtxt("%s/pairwise_params" % dir, usecols=(4,))[2*n_native_pairs + 1::2]
nn_r0_cont = nn_r0 + 0.1
widths = 0.05
top = "%s/Native.pdb" % dir
# Reuse cached per-trajectory Qtanh when every trajectory directory has it;
# otherwise compute and save.
if all([ os.path.exists("%s/Qtanh_0_05.dat" % x.split("/")[0]) for x in trajfiles ]):
    qtanh = [ np.loadtxt("%s/Qtanh_0_05.dat" % x.split("/")[0]) for x in trajfiles ]
else:
    qtanhsum_obs = observables.TanhContactSum(top, pairs, r0_cont, widths)
    qtanh = observables.calculate_observable(trajfiles, qtanhsum_obs, saveas="Qtanh_0_05.dat")
# Same cache-or-compute pattern for the non-native pair energy.
if all([ os.path.exists("%s/Enonnative.dat" % x.split("/")[0]) for x in trajfiles ]):
    Enn = [ np.loadtxt("%s/Enonnative.dat" % x.split("/")[0]) for x in trajfiles ]
else:
    Enn_obs = observables.PairEnergySum(top, nn_pairs, nn_pair_type, nn_eps, nn_pair_params)
    Enn = observables.calculate_observable(trajfiles, Enn_obs, saveas="Enonnative.dat")
# Pool all trajectories and histogram q into 40 bins; the averaging loop
# that fills E_bin_avg/dE2_bin_avg presumably follows this chunk.
q = np.concatenate(qtanh)
E = np.concatenate(Enn)
n,bin_edges = np.histogram(q,bins=40)
mid_bin = 0.5*(bin_edges[1:] + bin_edges[:-1])
E_bin_avg = np.zeros(len(bin_edges) - 1,float)
dE2_bin_avg = np.zeros(len(bin_edges) - 1,float)
# Plot the (unnormalized) PMF along the qtanh coordinate for this window.
with open("umbrella_params", "r") as fin:
    params = fin.readline().split()
q0 = float(params[0])
kumb = float(params[1])
gamma = float(params[2])
# Floor division keeps n_frames_toss an int under Python 3.
n_frames_toss = int(params[4]) // 1000

# BUGFIX: the original had a dangling "else:"; the cache-check guard was
# missing.  Reconstructed to match the identical logic in
# calculate_E_no_bias elsewhere in this file.
if not os.path.exists("qtanh.npy"):
    pairs = np.loadtxt(
        "umbrella_params", usecols=(0, 1), dtype=int, skiprows=1) - 1
    r0 = np.loadtxt("umbrella_params", usecols=(2,), skiprows=1)
    widths = (2. / gamma) * np.ones(len(pairs))
    qtanhsum_obs = observables.TanhContactSum("conf.gro", pairs,
                                              1.2 * r0, widths)
    qtanh = observables.calculate_observable(["traj.xtc"], qtanhsum_obs)
    np.save("qtanh.npy", qtanh)
else:
    qtanh = np.load("qtanh.npy")

# PMF = -ln(counts), shifted so the minimum is zero.
# NOTE(review): empty bins (n == 0) produce inf/RuntimeWarning here —
# presumably acceptable for these data; confirm if it matters.
n, bins = np.histogram(qtanh, bins=n_bins)
mid_bin = 0.5 * (bins[1:] + bins[:-1])
pmf = -np.log(n)
pmf -= pmf.min()
plt.plot(mid_bin, pmf, label="$Q_0 = {}$".format(Q0[i]))
os.chdir("..")
plt.xlim(0, 150)