Example #1
# Imports and parameter setup; module paths assume TRIQS 2.x with SOM 1.x
# and may differ for other versions.
from pytriqs.gf import *
from pytriqs.archive import HDFArchive
from triqs_som.som import Som
import numpy

# Parameters passed to Som.run()
# Energy window for the accumulated spectral function (illustrative choice)
run_params = {'energy_window': (0, 5.0)}
# Verbosity of the output
run_params['verbosity'] = 3
# Number of particular solutions to accumulate
run_params['l'] = 5000
# Number of global updates
run_params['f'] = 100
# Number of local updates per global update
run_params['t'] = 50
# Accumulate histogram of the objective function values
run_params['make_histograms'] = True

# Read \chi(i\omega_n) from archive
# Could be \chi(\tau) or \chi_l as well.
chi_iw = HDFArchive('example.h5', 'r')['chi_iw']

# Set the weight function S to a constant (all points of chi_iw are equally important)
S = chi_iw.copy()
S.data[:] = 1.0

# Estimated norms of spectral functions, (\pi/2) * \chi(i\omega_0)
norms = (numpy.pi / 2) * numpy.array(
    [chi_iw(0).real[0, 0], chi_iw(0).real[1, 1]])

# Construct a SOM object
cont = Som(chi_iw, S, kind="BosonAutoCorr", norms=norms)

# Run!
# Takes 2-3 minutes on 4 cores ...
cont.run(**run_params)

# Evaluate the solution on an energy mesh
# NB: we can use *any* energy window at this point, not necessarily that from run_params
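# Sketch (not part of the original snippet): evaluate the accumulated solution
# on a real-frequency mesh. The window, n_points and indices are illustrative,
# assuming chi_iw is a 2x2 matrix Green's function as the norms above suggest.
chi_w = GfReFreq(window=(0, 5.0), n_points=1000, indices=[0, 1])
chi_w << cont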
Example #2
# Imports and parameter setup; module paths assume TRIQS 2.x with SOM 1.x
# and may differ for other versions.
from pytriqs.gf import *
from pytriqs.archive import HDFArchive
from triqs_som.som import Som
import numpy

# Parameters passed to Som.run()
# Energy window for the accumulated spectral function (illustrative choice)
run_params = {'energy_window': (0, 10.0)}
# Verbosity of the output
run_params['verbosity'] = 3
# Number of particular solutions to accumulate
run_params['l'] = 5000
# Number of global updates
run_params['f'] = 100
# Number of local updates per global update
run_params['t'] = 50
# Accumulate histogram of the objective function values
run_params['make_histograms'] = True

# Read g(\tau) from archive
# Could be g(i\omega_n) or g_l as well.
g_tau = HDFArchive('example.h5', 'r')['g_tau']

# Set the weight function S to a constant (all points of g_tau are equally important)
S = g_tau.copy()
S.data[:] = 1.0

# Construct a SOM object
# Norm of spectral function is known to be 0.5
cont = Som(g_tau, S, kind="ZeroTemp", norms=numpy.array([0.5]))

# Run!
# Takes 1-2 minutes on 16 cores ...
cont.run(**run_params)

# Evaluate the solution on an energy mesh
# NB: we can use *any* energy window at this point, not necessarily that from run_params
n_w = 1000  # number of real-frequency points (n_w is left undefined in the snippet; illustrative value)
g_w = GfReFreq(window=(0, 10.0), n_points=n_w, indices=[0])
g_w << cont
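# Sketch (not part of the original snippet): store the continued Green's function
# and the accumulated histograms in an HDF5 archive. The file name is illustrative;
# cont.histograms assumes the SOM 1.x Python interface with make_histograms=True.
with HDFArchive('example_results.h5', 'w') as ar:
    ar['g_w'] = g_w
    ar['histograms'] = cont.histograms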