def test_metropolis(nconfig=1000, ndim=3, nelec=2, nstep=100, tau=0.5):
    """Smoke-test the Metropolis sampler against reference energies.

    Draws `nconfig` random walker configurations, equilibrates them with
    `metropolis_sample`, then prints kinetic, electron-nucleus and total
    local energies alongside their reference values (1.0, -2.0, -1.0).

    Parameters
    ----------
    nconfig : number of walker configurations
    ndim    : spatial dimensions per electron
    nelec   : electrons per configuration
    nstep   : Metropolis steps to take
    tau     : Metropolis time step
    """
    # Project-local imports kept inside the function so the module can be
    # imported even when these files are unavailable.
    from slaterwf import ExponentSlaterWF
    from hamiltonian import Hamiltonian

    trial_wf = ExponentSlaterWF(alpha=1.0)
    ham = Hamiltonian(Z=1)

    walkers = np.random.randn(nelec, ndim, nconfig)
    walkers, acc = metropolis_sample(walkers, trial_wf, tau=tau, nstep=nstep)

    # Kinetic energy per configuration: -1/2 * sum over electrons of the
    # wavefunction Laplacian term (assumes wf.laplacian returns the
    # per-electron local Laplacian ratio -- TODO confirm in slaterwf).
    kinetic = -0.5 * np.sum(trial_wf.laplacian(walkers), axis=0)
    # Electron-nucleus potential energy per configuration.
    e_nuc = ham.pot_en(walkers)
    # Local energy; note the electron-electron term is not included here,
    # consistent with the printed reference values.
    e_local = kinetic + e_nuc

    print("Cycle finished; acceptance = {acc:3.2f}.".format(acc=acc))

    report_rows = zip(['kinetic', 'Electron-nucleus', 'total'],
                      [kinetic, e_nuc, e_local],
                      [1.0, -2.0, -1.0])
    for nm, quant, ref in report_rows:
        avg = np.mean(quant)
        # Standard error of the mean over the walker ensemble.
        err = np.std(quant) / np.sqrt(nconfig)
        print("{name:20s} = {avg:10.6f} +- {err:8.6f}; reference = {ref:5.2f}".
              format(name=nm, avg=avg, err=err, ref=ref))
# Best Slater determinant: scan the orbital exponent alpha, measure the
# mean local energy at each value, and locate the variational minimum with
# a curve fit.  Assumes nelec, ndim, nconfig, tau, nstep and ham are
# defined earlier in the file -- TODO confirm against the full module.
n = 51
ast = 1.5    # alpha scan start
aen = 2.5    # alpha scan end
bst = -0.5   # beta scan start (the beta scan itself happens elsewhere)
ben = 1.5    # beta scan end
alphas = np.linspace(ast, aen, n)
betas = np.linspace(bst, ben, n)

# BUG FIX: these accumulators were appended to below without ever being
# created as lists (and any earlier `acc` is a scalar acceptance ratio,
# so `acc.append` would fail regardless) -- start each scan from fresh
# empty lists.
acc = []
ke = []
vele = []
venu = []
potential = []
eloc = []
virial = []

for alpha in alphas:
    ewf = ExponentSlaterWF(alpha=alpha)
    pos = np.random.randn(nelec, ndim, nconfig)
    pos, _ = metropolis_sample(pos=pos, wf=ewf, tau=tau, nstep=nstep)
    acc.append(_)
    # Mean kinetic energy: -1/2 * <sum over electrons of the Laplacian term>.
    ke.append(np.mean(-0.5 * np.sum(ewf.laplacian(pos), axis=0)))
    vele.append(np.mean(ham.pot_ee(pos)))  # electron-electron potential
    venu.append(np.mean(ham.pot_en(pos)))  # electron-nucleus potential
    potential.append(np.mean(ham.pot(pos)))
    eloc.append(ke[-1] + potential[-1])
    # Virial ratio <V>/<T> (expected near -2 for Coulombic systems at the
    # variational optimum -- NOTE(review): confirm intent).
    virial.append(potential[-1] / ke[-1])

fig = plt.figure(1)
ax = fig.add_subplot(111)
ax.plot(alphas, eloc)

# Fit E(alpha) with `func` (defined elsewhere; presumably a 3-parameter
# quadratic -- TODO confirm) and mark the fitted minimum on the plot.
x = alphas
popt, pcov = curve_fit(func, x, eloc)
a, b, c = popt
x = np.linspace(ast, aen, n * 10)  # dense grid for a smooth fitted curve
y = func(x, a, b, c)
minidx = np.argmin(y)
ax.plot(x, y)
ax.axvline(x=x[minidx], color='k')
ax.axhline(y=y[minidx], color='k')
df[i] = [] for i in ['alpha', 'beta', 'acceptance']: df[i] = [] ham = Hamiltonian(Z=2) # Helium # Best Slater determinant. beta = 0.0 # For book keeping. for alpha in np.linspace(1.5, 2.5, 11): wf = ExponentSlaterWF(alpha=alpha) sample, acc = metropolis_sample(np.random.randn(nelec, ndim, nconfig), wf, tau=tau, nstep=nstep) ke = -0.5 * np.sum(wf.laplacian(sample), axis=0) vion = ham.pot_en(sample) vee = ham.pot_ee(sample) for i in range(nconfig): for nm, quant in zip(quantities, [ke, vion, vee]): df[nm].append(quant[i]) df['alpha'].append(alpha) df['beta'].append(beta) df['acceptance'].append(acc) # Best Slater-Jastrow. alpha = 2.0 for beta in np.linspace(-0.5, 1.5, 11): wf = MultiplyWF(ExponentSlaterWF(alpha=alpha), JastrowWF(a_ee=beta)) sample, acc = metropolis_sample(np.random.randn(nelec, ndim, nconfig), wf,