def test_msd_global_temp(self):
    """Tests diffusion via MSD for global gamma and temperature"""

    gamma = 9.4
    kT = 0.37
    dt = 0.5

    system = self.system
    system.part.clear()
    p = system.part.add(pos=(0, 0, 0), id=0)
    system.time_step = dt
    system.thermostat.set_brownian(kT=kT, gamma=gamma, seed=42)
    system.cell_system.skin = 0.4

    pos_obs = ParticlePositions(ids=(p.id,))
    c_pos = Correlator(obs1=pos_obs, tau_lin=16, tau_max=100., delta_N=10,
                       corr_operation="square_distance_componentwise",
                       compress1="discard1")
    system.auto_update_accumulators.add(c_pos)

    system.integrator.run(500000)

    c_pos.finalize()

    # Check MSD
    msd = c_pos.result()
    system.auto_update_accumulators.clear()

    def expected_msd(x):
        return 2. * kT / gamma * x

    for i in range(2, 6):
        np.testing.assert_allclose(
            msd[i, 2:5], expected_msd(msd[i, 0]), rtol=0.02)
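# For reference (not part of the original test): the Einstein relation used
# above gives a per-component diffusion coefficient D = kT / gamma
# = 0.37 / 9.4 ≈ 0.0394, so e.g. at lag time t = 10 the expected
# per-component MSD is 2 * D * t ≈ 0.79, which is what expected_msd() returns.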
def test_06__diffusion(self):
    """Tests rotational and translational diffusion coefficients via Green-Kubo"""
    system = self.system
    system.part.clear()

    kT = 1.37
    dt = 0.1
    system.time_step = dt

    # Translational gamma. We cannot test per-component gammas if rotation
    # is on, because the body and space frames become different.
    gamma = 3.1

    # Rotational gamma
    gamma_rot_i = 4.7
    gamma_rot_a = [4.2, 1, 1.2]

    # If we have Langevin per particle:
    # per-particle kT
    per_part_kT = 1.6
    # Translation
    per_part_gamma = 1.63
    # Rotation
    per_part_gamma_rot_i = 2.6
    per_part_gamma_rot_a = [2.4, 3.8, 1.1]

    # Particle with global thermostat params
    p_global = system.part.add(pos=(0, 0, 0))
    # Make sure the mass doesn't change the diffusion coefficient
    self.setup_diff_mass_rinertia(p_global)

    # Particle-specific gamma, kT, and both
    if espressomd.has_features("LANGEVIN_PER_PARTICLE"):
        p_gamma = system.part.add(pos=(0, 0, 0))
        self.setup_diff_mass_rinertia(p_gamma)
        if espressomd.has_features("PARTICLE_ANISOTROPY"):
            p_gamma.gamma = per_part_gamma, per_part_gamma, per_part_gamma
            if espressomd.has_features("ROTATION"):
                p_gamma.gamma_rot = per_part_gamma_rot_a
        else:
            p_gamma.gamma = per_part_gamma
            if espressomd.has_features("ROTATION"):
                p_gamma.gamma_rot = per_part_gamma_rot_i

        p_kT = system.part.add(pos=(0, 0, 0))
        self.setup_diff_mass_rinertia(p_kT)
        p_kT.temp = per_part_kT

        p_both = system.part.add(pos=(0, 0, 0))
        self.setup_diff_mass_rinertia(p_both)
        p_both.temp = per_part_kT
        if espressomd.has_features("PARTICLE_ANISOTROPY"):
            p_both.gamma = per_part_gamma, per_part_gamma, per_part_gamma
            if espressomd.has_features("ROTATION"):
                p_both.gamma_rot = per_part_gamma_rot_a
        else:
            p_both.gamma = per_part_gamma
            if espressomd.has_features("ROTATION"):
                p_both.gamma_rot = per_part_gamma_rot_i

    # Thermostat setup
    if espressomd.has_features("ROTATION"):
        if espressomd.has_features("PARTICLE_ANISOTROPY"):
            # particle anisotropy and rotation
            system.thermostat.set_langevin(
                kT=kT, gamma=gamma, gamma_rotation=gamma_rot_a, seed=41)
        else:
            # rotation without particle anisotropy
            system.thermostat.set_langevin(
                kT=kT, gamma=gamma, gamma_rotation=gamma_rot_i, seed=41)
    else:
        # no rotation
        system.thermostat.set_langevin(kT=kT, gamma=gamma, seed=41)

    system.cell_system.skin = 0.4
    system.integrator.run(100)

    # Correlators
    all_particles = [p_global]
    if espressomd.has_features("LANGEVIN_PER_PARTICLE"):
        all_particles.append(p_gamma)
        all_particles.append(p_kT)
        all_particles.append(p_both)

    # linear velocity
    vel_obs = ParticleVelocities(ids=system.part[:].id)
    corr_vel = Correlator(
        obs1=vel_obs, tau_lin=10, tau_max=1.4, delta_N=2,
        corr_operation="componentwise_product", compress1="discard1")
    system.auto_update_accumulators.add(corr_vel)
    # angular velocity
    if espressomd.has_features("ROTATION"):
        omega_obs = ParticleBodyAngularVelocities(ids=system.part[:].id)
        corr_omega = Correlator(
            obs1=omega_obs, tau_lin=10, tau_max=1.5, delta_N=2,
            corr_operation="componentwise_product", compress1="discard1")
        system.auto_update_accumulators.add(corr_omega)

    system.integrator.run(80000)

    system.auto_update_accumulators.remove(corr_vel)
    corr_vel.finalize()
    if espressomd.has_features("ROTATION"):
        system.auto_update_accumulators.remove(corr_omega)
        corr_omega.finalize()

    # Verify diffusion
    # Translation
    # Cast gammas to vectors to make the checks independent of
    # PARTICLE_ANISOTROPY
    gamma = np.ones(3) * gamma
    per_part_gamma = np.ones(3) * per_part_gamma
    self.verify_diffusion(p_global, corr_vel, kT, gamma)
    if espressomd.has_features("LANGEVIN_PER_PARTICLE"):
        self.verify_diffusion(p_gamma, corr_vel, kT, per_part_gamma)
        self.verify_diffusion(p_kT, corr_vel, per_part_kT, gamma)
        self.verify_diffusion(p_both, corr_vel, per_part_kT, per_part_gamma)

    # Rotation
    if espressomd.has_features("ROTATION"):
        # Decide on the effective rotational gamma, since for rotation it
        # is direction-dependent
        if espressomd.has_features("PARTICLE_ANISOTROPY"):
            eff_gamma_rot = gamma_rot_a
            eff_per_part_gamma_rot = per_part_gamma_rot_a
        else:
            eff_gamma_rot = gamma_rot_i * np.ones(3)
            eff_per_part_gamma_rot = per_part_gamma_rot_i * np.ones(3)

        self.verify_diffusion(p_global, corr_omega, kT, eff_gamma_rot)
        if espressomd.has_features("LANGEVIN_PER_PARTICLE"):
            self.verify_diffusion(
                p_gamma, corr_omega, kT, eff_per_part_gamma_rot)
            self.verify_diffusion(
                p_kT, corr_omega, per_part_kT, eff_gamma_rot)
            self.verify_diffusion(
                p_both, corr_omega, per_part_kT, eff_per_part_gamma_rot)
                   tau_max=100., delta_N=10,
                   corr_operation="square_distance_componentwise",
                   compress1="discard1")
c_vel = Correlator(obs1=vel_obs, tau_lin=16, tau_max=20., delta_N=1,
                   corr_operation="scalar_product", compress1="discard1")
system.auto_update_accumulators.add(c_pos)
system.auto_update_accumulators.add(c_vel)

system.integrator.run(1000000)

c_pos.finalize()
c_vel.finalize()

np.savetxt("msd.dat", c_pos.result())
np.savetxt("vacf.dat", c_vel.result())

# Integral of the VACF via Green-Kubo:
# D = 1/3 int_0^infty <v(t_0) . v(t_0 + t)> dt
vacf = c_vel.result()

# Integrate with the trapezoidal rule
I = np.trapz(vacf[:, 2], vacf[:, 0])
ratio = 1. / 3. * I / (kT / gamma)
print("Ratio of measured and expected diffusion coefficients from Green-Kubo:", ratio)
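# A possible cross-check (not in the original script): estimate D from the
# long-time slope of the MSD, MSD_total(t) ≈ 6 D t, and compare it with the
# Einstein prediction kT / gamma. This assumes the column layout
# time | samples | msd_x | msd_y | msd_z of c_pos.result(), as used elsewhere
# in this section.
msd = c_pos.result()
msd_total = msd[:, 2] + msd[:, 3] + msd[:, 4]
# least-squares slope through the origin, skipping the shortest lag times
slope = np.sum(msd_total[2:] * msd[2:, 0]) / np.sum(msd[2:, 0]**2)
print("Ratio of measured and expected diffusion coefficients from MSD:",
      slope / 6. / (kT / gamma))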
# correlator
ang_id = ParticleAngularVelocities(ids=[0])
avacf = Correlator(obs1=ang_id, corr_operation="scalar_product",
                   delta_N=1, tau_max=tmax, tau_lin=16)
system.auto_update_accumulators.add(avacf)

# Integrate 5,000,000 steps. This can be done in one go as well.
for i in range(SAMP_STEPS):
    if (i + 1) % 100 == 0:
        print('\rrun %i: %.0f%%' % (run + 1, (i + 1) * 100. / SAMP_STEPS),
              end='', flush=True)
    system.integrator.run(SAMP_LENGTH)
print()

# Finalize the accumulators and write to disk
system.auto_update_accumulators.remove(msd)
msd.finalize()
np.savetxt("{}/msd_{}_{}.dat".format(outdir, vel, run), msd.result())

system.auto_update_accumulators.remove(vacf)
vacf.finalize()
np.savetxt("{}/vacf_{}_{}.dat".format(outdir, vel, run), vacf.result())

system.auto_update_accumulators.remove(avacf)
avacf.finalize()
np.savetxt("{}/avacf_{}_{}.dat".format(outdir, vel, run), avacf.result())
def calc(var):
    # AVB: Create an output directory to store the output files
    outdir = "./Noelle/r01.5kBT4Ads/1000=3.2"
    if not os.path.exists(outdir):
        os.makedirs(outdir)

    # Setup constants
    time_step = 0.01
    loops = 30
    step_per_loop = 100

    # AVB: the parameters (that I usually use)
    a = 0.05
    r0 = 2.0 * a
    kBT = 4.0e-6
    vwf_type = 0
    collagen_type = 1
    monomer_mass = 0.01
    box_l = 32.0

    # print("Shear velocity:")
    # shear_velocity = float(input())
    # vy = box_l * shear_velocity
    vy = var
    print(vy)
    v = [0, vy, 0]

    # System setup
    system = System(box_l=[box_l, box_l, box_l])
    system.set_random_state_PRNG()
    np.random.seed(seed=system.seed)
    system.cell_system.skin = 0.4

    # The number of monomers is set to 20 by default.
    # Change this value for further simulations.
    mpc = 20

    # FENE interaction
    fene = interactions.FeneBond(k=0.04, d_r_max=0.3)
    system.bonded_inter.add(fene)

    # Setup polymer of part_id 0 with FENE bond
    # AVB: Notice the mode, max_tries and shield parameters of the pruned
    # self-avoiding random walk algorithm
    polymer.create_polymer(N_P=1, MPC=mpc, bond=fene, bond_length=r0,
                           start_pos=[29.8, 16.0, 16.0], mode=2,
                           max_tries=100, shield=0.6 * r0)

    # AVB: set the particle type and change the mass of each monomer to 0.01
    system.part[:].type = vwf_type
    system.part[:].mass = monomer_mass

    # AVB: I suggest adding a Lennard-Jones interaction between the monomers
    # to reproduce hydrophobicity
    # AVB: parameters for the potential (amplitude and cut-off radius)
    amplVwfVwf = 4.0 * kBT  # sometimes we change this to 2.0 * kBT
    rcutVwfVwf = 1.5 * r0
    # AVB: the potential
    system.non_bonded_inter[vwf_type, vwf_type].lennard_jones.set_params(
        epsilon=amplVwfVwf, sigma=r0 / 1.122, shift="auto",
        cutoff=rcutVwfVwf, min=r0 * 0.6)

    print("Warming up the polymer chain.")
    # For longer chains (> 100) an extensive warmup is necessary ...
    system.time_step = 0.002
    system.thermostat.set_langevin(kT=4.0e-6, gamma=1.0)
    # AVB: The Langevin thermostat is needed here because the LB fluid has
    # not been initialized yet.
    # AVB: It is also needed so that the polymer adopts the equilibrium
    # conformation of the globule.
    # AVB: you may skip this step

    for i in range(100):
        system.force_cap = float(i) + 1
        system.integrator.run(100)
    print("Warmup finished.")

    system.force_cap = 0
    system.integrator.run(100)
    system.time_step = time_step
    system.integrator.run(500)

    # AVB: the following command turns off the Langevin thermostat that was
    # switched on above
    system.thermostat.turn_off()

    # AVB: This command sets the velocities of all particles to zero
    system.part[:].v = [0, 0, 0]

    # AVB: The density was too small here; I have set it to 1.0 for now.
    # AVB: It would have to be recalculated, but the density of the fluid
    # should not affect the motion of the polymer (this is how our physical
    # model works).
    lbf = espressomd.lb.LBFluid(agrid=1, dens=1.0, visc=1.0e2,
                                tau=time_step, fric=0.01)
    system.actors.add(lbf)
    system.thermostat.set_lb(kT=4.0e-6)

    # Setup boundaries
    walls = [lbboundaries.LBBoundary() for k in range(2)]
    walls[0].set_params(shape=shapes.Wall(normal=[1, 0, 0], dist=1.5),
                        velocity=v)
    walls[1].set_params(shape=shapes.Wall(normal=[-1, 0, 0], dist=-30.5))

    for wall in walls:
        system.lbboundaries.add(wall)

    print("Warming up the system with LB fluid.")
    system.integrator.run(5000)
    print("LB fluid warming finished.")
    # AVB: after this you should have a completely collapsed polymer globule
    # AVB: If you want to watch the globule formation in Paraview, just
    # change 5000 to 0 in the integrator.run() call above

    N = 25
    x_coord = np.array([30] * N)
    y_coord = np.arange(14, 24, 5 / N)
    z_coord = np.arange(14, 24, 5 / N)
    for i in range(N):
        for j in range(N):
            system.part.add(id=i * N + j + 100,
                            pos=np.array([x_coord[i], y_coord[j], z_coord[i]]),
                            v=np.array([0, 0, 0]),
                            type=i * N + j + 100)

    all_collagen = range(100, (N - 1) * N + (N - 1) + 100)
    system.comfixed.types = all_collagen

    for i in range(100, (N - 1) * N + (N - 1) + 100):
        system.non_bonded_inter[vwf_type, i].lennard_jones.set_params(
            epsilon=amplVwfVwf, sigma=r0 / 1.122, shift="auto",
            cutoff=rcutVwfVwf, min=r0 * 0.6)

    # configure correlators
    com_pos = ComPosition(ids=(0, ))
    c = Correlator(obs1=com_pos, tau_lin=16, tau_max=loops * step_per_loop,
                   delta_N=1, corr_operation="square_distance_componentwise",
                   compress1="discard1")
    system.auto_update_accumulators.add(c)

    print("Sampling started.")
    print("length after warmup")
    print(system.analysis.calc_re(chain_start=0, number_of_chains=1,
                                  chain_length=mpc - 1)[0])

    lengths = []
    ylengths = []

    for i in range(loops):
        system.integrator.run(step_per_loop)
        system.analysis.append()
        lengths.append(system.analysis.calc_re(chain_start=0,
                                               number_of_chains=1,
                                               chain_length=mpc - 1)[0])
        lbf.print_vtk_velocity(outdir + "/" + str(vy) + "%04i.vtk" % i)
        system.part.writevtk(outdir + "/" + str(vy) + "vwf_all%04i.vtk" % i,
                             types=all_collagen)
        system.part.writevtk(outdir + "/" + str(vy) + "vwf_poly%04i.vtk" % i,
                             types=[0])
        cor = list(system.part[:].pos)
        y = []
        for l in cor:
            y.append(l[1])
        ylengths.append(max(y) - min(y))

        sys.stdout.write("\rSampling: %05i" % i)
        sys.stdout.flush()

    walls[0].set_params(shape=shapes.Wall(normal=[1, 0, 0], dist=1.5))
    walls[1].set_params(shape=shapes.Wall(normal=[-1, 0, 0], dist=-30.5))

    for i in range(100):
        system.integrator.run(step_per_loop)
        lengths.append(system.analysis.calc_re(chain_start=0,
                                               number_of_chains=1,
                                               chain_length=mpc - 1)[0])

    system.part.writevtk(outdir + "/" + str(vy) + "vwf_all[r0=2,kBT=4]intheEND.vtk")

    with open(outdir + "/lengths" + str(vy) + ".dat", "a") as datafile:
        datafile.write("\n".join(map(str, lengths)))

    with open(outdir + "/lengthsY" + str(vy) + ".dat", "a") as datafile:
        datafile.write("\n".join(map(str, ylengths)))

    mean_vy = [(vy * 10000) / 32, sum(ylengths) / len(ylengths)]

    print("mean_vy")
    print(mean_vy)

    with open(outdir + "/mean_vy" + "2kBT_2r0" + ".dat", "a") as datafile:
        datafile.write(" ".join(map(str, mean_vy)))

    c.finalize()
    corrdata = c.result()
    corr = zeros((corrdata.shape[0], 2))
    corr[:, 0] = corrdata[:, 0]
    corr[:, 1] = (corrdata[:, 2] + corrdata[:, 3] + corrdata[:, 4]) / 3
    savetxt(outdir + "/msd_nom" + str(mpc) + ".dat", corr)

    with open(outdir + "/rh_out.dat", "a") as datafile:
        rh = system.analysis.calc_rh(chain_start=0, number_of_chains=1,
                                     chain_length=mpc - 1)
        datafile.write(str(mpc) + " " + str(rh[0]) + "\n")
                 tau_lin=16)
system.auto_update_accumulators.add(msd)

## Exercise 3 ##
# Construct the auto-accumulators for the VACF and AVACF,
# using the example of the MSD

# Initialize the velocity auto-correlation function (VACF) correlator
...

# Initialize the angular velocity auto-correlation function (AVACF)
# correlator
...

# Integrate 5,000,000 steps. This can be done in one go as well.
for i in range(sampsteps):
    system.integrator.run(samplength)

# Finalize the accumulators and write to disk
system.auto_update_accumulators.remove(msd)
msd.finalize()
np.savetxt("{}/msd_{}_{}.dat".format(outdir, vel, run), msd.result())

...

...
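# One possible sketch for Exercise 3, kept commented out so the exercise
# placeholders above remain untouched. It mirrors the MSD correlator and the
# AVACF setup shown earlier in this section; the observable name vel_id is an
# assumption, and tmax is taken from the surrounding script:
# vel_id = ParticleVelocities(ids=[0])
# vacf = Correlator(obs1=vel_id, corr_operation="scalar_product",
#                   delta_N=1, tau_max=tmax, tau_lin=16)
# system.auto_update_accumulators.add(vacf)
# ang_id = ParticleAngularVelocities(ids=[0])
# avacf = Correlator(obs1=ang_id, corr_operation="scalar_product",
#                    delta_N=1, tau_max=tmax, tau_lin=16)
# system.auto_update_accumulators.add(avacf)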
old_stress.flush()
kin_stress.write(str(stress['kinetic'][0, 1]) + "\n")
kin_stress.flush()
temp.write(str(energy['kinetic']) + "\n")
temp.flush()
ener.write(str(energy['total']) + "\n")
ener.flush()
print(i, flush=True)
system.integrator.run(iterations)

# Finalize the correlators and collect their results
c_dpd.finalize()
c_old.finalize()
dpd_stress_acf = c_dpd.result()
old_stress_acf = c_old.result()

# Save the correlator results as numpy files
np.save('dpd_sample_dpd_stress_acf.npy', dpd_stress_acf)
np.save('dpd_sample_old_stress_acf.npy', old_stress_acf)

# Close the non-correlated stress files
dpd_stress.close()
old_stress.close()
kin_stress.close()
temp.close()
ener.close()
for pt in system.part.select(lambda p: True):
    force = (pt.f[0]**2 + pt.f[1]**2 + pt.f[2]**2)**(1 / 2)
    if force > max:
        max = force
print(max)

from espressomd import electrostatics

p3m = electrostatics.P3M(prefactor=1.0, accuracy=1e-2)
system.actors.add(p3m)
checkpoint.register("p3m")

fp.close()

# Finalize the correlator and obtain the results
msd_corr.finalize()
msd = msd_corr.result()

# STEP 6
import matplotlib.pyplot as plt

fig1 = plt.figure(num=None, figsize=(10, 6), dpi=80,
                  facecolor='w', edgecolor='k')
fig1.set_tight_layout(False)
plt.plot(r00, avg_rdf00, '-', color="#A60628", linewidth=2, alpha=1)
plt.plot(r11, avg_rdf11, '-', color="#1528b5", linewidth=2, alpha=1)
plt.plot(r01, avg_rdf01, '-', color="#0dbf22", linewidth=2, alpha=1)
plt.xlabel('$r$', fontsize=20)
c = Correlator(obs1=com_pos, tau_lin=16, tau_max=loops * step_per_loop,
               delta_N=1, corr_operation="square_distance_componentwise",
               compress1="discard1")
system.auto_update_accumulators.add(c)

print("Sampling started.")

for i in range(loops):
    system.integrator.run(step_per_loop)
    system.analysis.append()
    lbf.print_vtk_velocity(outdir + "/fluid%04i.vtk" % i)
    system.part.writevtk(outdir + "/vwf_all%04i.vtk" % i)
    sys.stdout.write("\rSampling: %05i" % i)
    sys.stdout.flush()

c.finalize()
corrdata = c.result()
corr = zeros((corrdata.shape[0], 2))
corr[:, 0] = corrdata[:, 0]
corr[:, 1] = (corrdata[:, 2] + corrdata[:, 3] + corrdata[:, 4]) / 3
savetxt(outdir + "/msd_nom" + str(mpc) + ".dat", corr)

with open(outdir + "/rh_out.dat", "a") as datafile:
    rh = system.analysis.calc_rh(chain_start=0, number_of_chains=1,
                                 chain_length=mpc - 1)
    datafile.write(str(mpc) + " " + str(rh[0]) + "\n")