Example #1
    def __init__(self, archive, *args, **kwargs):
        self.archive = archive
        if not os.path.exists(archive) and mpi.is_master_node():
            archive = HDFArchive(archive, 'w')
            archive.create_group('results')
            archive['results']['n_dmft_loops'] = 0
            del archive
        mpi.barrier()
Example #2
    def next_loop(self):
        """Returns the number of the next DMFT loop."""
        archive = HDFArchive(self.archive, 'r')
        if archive.is_group('results'):
            nl = archive['results']['n_dmft_loops']
        else:
            nl = 0
        del archive
        return nl
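The counter read by next_loop has to be advanced after each completed iteration. A minimal companion sketch, assuming the archive layout created in Example #1 (the method name increment_loop_counter is hypothetical):

    def increment_loop_counter(self):
        """Advances the stored loop counter; call once per completed DMFT loop."""
        if mpi.is_master_node():
            archive = HDFArchive(self.archive, 'a')  # append mode keeps existing data
            archive['results']['n_dmft_loops'] += 1
            del archive  # closes the file
        mpi.barrier()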
Example #3
from pytriqs.archive import HDFArchive
from pytriqs.utility.comparison_tests import assert_block_gfs_are_close

def check_quantity(file_ref, quantity_name):

    file_new = "data_from_scratch/iteration_000.h5"

    results_ref =  HDFArchive(file_ref,'r')
    results_new =  HDFArchive(file_new,'r')

    quantity_ref = results_ref[quantity_name]
    quantity_new = results_new[quantity_name]

    print 'checking quantity ', quantity_name, '...'
    assert_block_gfs_are_close(quantity_new, quantity_ref, precision=1e-10)
Example #4
def SIAM(U, e_f, V, D, beta, filename="qmc_results.h5"):
    # Create hybridization function
    Delta = V**2 * Flat(D)

    # Construct the impurity solver with the inverse temperature
    # and the structure of the Green's functions
    S = Solver(beta=beta, gf_struct={'up': [0], 'down': [0]}, n_l=50)

    # Initialize the non-interacting Green's function S.G0_iw
    for name, g0 in S.G0_iw:
        g0 << inverse(iOmega_n - e_f - Delta)

    # Run the solver. The results will be in S.G_tau, S.G_iw and S.G_l
    S.solve(
        h_int=U * n('up', 0) * n('down', 0),  # Local Hamiltonian
        n_cycles=2000000,  # Number of QMC cycles
        length_cycle=50,  # Length of one cycle
        n_warmup_cycles=20000,  # Warmup cycles
        measure_g_l=True,  # Measure G_l (representation of G in terms of Legendre polynomials)
        use_norm_as_weight=True,  # Necessary option for the measurement of the density matrix
        measure_density_matrix=True,  # Measure reduced impurity density matrix
        measure_pert_order=True)  # Measure histogram of k

    # Save the results in an HDF5 file (only on the master node)
    if mpi.is_master_node():
        with HDFArchive(filename, 'w') as Results:
            Results["G_tau"] = S.G_tau
            Results["G_iw"] = S.G_iw
            Results["G_l"] = S.G_l
            Results["rho"] = S.density_matrix
            Results["k_histogram"] = S.perturbation_order_total
            Results["average_sign"] = S.average_sign
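A hypothetical driver call for the function above; all parameter values are illustrative only:

if __name__ == '__main__':
    # Single-orbital Anderson impurity in a flat band of half-width D
    SIAM(U=2.0, e_f=-1.0, V=1.0, D=2.0, beta=50.0, filename="qmc_results.h5")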
Example #5
def readold_sigma_iw_list(oldfile):

    if rank == 0:

        print 'oldfile', oldfile
        results = HDFArchive(oldfile, 'r')

        Sigma_iw_list = []

        n_iw_new = results["Sigma_iw___at_0/bl/mesh/size"]
        iw_mesh_new = MeshImFreq(beta, 'Fermion', n_iw_new / 2)
        ### n_iw for MeshImFreq counts positive frequencies only, while the
        ### value read from the hdf5 file is the total number, hence the / 2.

        for i in range(0, N_atoms):

            dataname = "Sigma_iw___at_" + str(i)
            tmp = results[dataname]

            S = BlockGf(mesh=iw_mesh_new, gf_struct=gf_struct)
            S["bl"].data[...] = tmp["bl"].data[...]

            Sigma_iw_list.append(S)

    else:
        Sigma_iw_list = None

    Sigma_iw_list = world.bcast(Sigma_iw_list, root=0)

    return Sigma_iw_list
Example #6
def SIAM(U, e_f, V, D, beta, filename="qmc_results.h5"):
    Delta = V**2 * Flat(D)
    N_MC = 1e5
    l_max = 10
    independent_samples = 16
    for l in range(l_max + 1):
        for i in range(independent_samples):
            S = Solver(beta=beta, gf_struct={'up': [0], 'down': [0]})
            # Initialize the non-interacting Green's function S.G0_iw
            for name, g0 in S.G0_iw:
                g0 << inverse(iOmega_n - e_f - Delta)
            # Run the solver. The results will be in S.G_tau, S.G_iw and S.G_l
            S.solve(
                h_int=U * n('up', 0) * n('down', 0),  # Local Hamiltonian
                n_cycles=int(N_MC / 2**l),  # Number of QMC cycles
                length_cycle=2**l,  # Length of one cycle
                n_warmup_cycles=int(N_MC / 2**l / 100),  #  Warmup cycles
                measure_g_tau=False,  #  Don't measure G_tau
                measure_g_l=False,  #  Don't measure G_l
                perform_post_proc=False,  #  Don't measure G_iw
                use_norm_as_weight=True,  # Necessary option for the measurement of the density matrix
                measure_density_matrix=True,  # Measure reduced impurity density matrix
                random_seed=i * 8521 + l * 14187 + mpi.rank * 7472)  # Random seed, very important!
            # Save the results in an HDF5 file (only on the master node)
            if mpi.is_master_node():
                with HDFArchive(filename) as Results:
                    Results["rho_l{}_i{}".format(l, i)] = S.density_matrix
Example #7
def calc_field(plot=True):

    filenames = glob.glob('data_pyed_h_field*.h5')

    out = ParameterCollection()
    d = ParameterCollection(data=[])
    h_vec, m_vec, m_ref_vec = [], [], []

    for filename in filenames:

        print '--> Loading:', filename

        with HDFArchive(filename, 'r') as s:
            p = s['p']
            d.data.append(p)
            h_vec.append(p.h_field)

            m = 0.5 * (-p.G_tau['up'](p.beta) + p.G_tau['dn'](p.beta))
            m_vec.append(np.squeeze(m))

            m_ref_vec.append(p.magnetization)

            # Susceptibility from the quadratic expectation value
            if np.abs(p.h_field) < 1e-9:
                out.chi_exp = p.magnetization2 * 2 * p.beta

    h_vec, m_vec, m_ref_vec = np.array(h_vec), np.array(m_vec), np.array(m_ref_vec)
    sidx = np.argsort(h_vec)
    d.h_vec, d.m_vec, d.m_ref_vec = h_vec[sidx], m_vec[sidx], m_ref_vec[sidx]

    from scipy.interpolate import InterpolatedUnivariateSpline as IUS

    spl = IUS(d.h_vec, d.m_ref_vec)
    out.chi = -spl(0, nu=1)  # Linear response
    out.beta = p.beta

    print 'beta, chi, chi_exp =', out.beta, out.chi, out.chi_exp

    filename_out = 'data_pyed_extrap_h_field_beta%6.6f.h5' % out.beta
    with HDFArchive(filename_out, 'w') as s:
        s['field'] = out

    for key, value in out.dict().items():
        setattr(d, key, value)

    if plot: plot_field(d)
Example #8
def load_from_DMFT(filename, n_iter=0):
    with HDFArchive(filename, 'r') as QMC:
        G_tau = QMC["G_tau"]
        G_iw = QMC["G_iw"]
        G_l = QMC["G_l"]
        Sigma_iw = QMC["Sigma"]
        G_iw_list = [QMC["G_iw_iter{}".format(i)] for i in range(n_iter)]
    return G_tau, G_iw, G_l, Sigma_iw, G_iw_list
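A short usage sketch, assuming an archive produced by a DMFT driver like the one in Example #15 (file name and iteration count are assumptions):

G_tau, G_iw, G_l, Sigma_iw, G_iw_list = load_from_DMFT("dmft_results.h5", n_iter=8)
print 'number of stored iterations:', len(G_iw_list)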
Example #9
def SOM(input_filename="dmft_results.h5", output_filename="som_results.h5"):

    # Read G(\tau) from archive
    # Could be G(i\omega_n) or G_l as well.
    with HDFArchive(input_filename, 'r') as QMC:
        G_tau = QMC["G_tau"]
        G_iw = QMC["G_iw"]
        G_l = QMC["G_l"]

    # Paramagnetic case: average spin up and spin down GF
    g_tau = (G_tau['up'] + G_tau['down']) / 2.
    g_iw = (G_iw['up'] + G_iw['down']) / 2.
    g_l = (G_l['up'] + G_l['down']) / 2.

    # Prepare input data: reduce the number of \tau-slices from 10001 to n_tau
    # reduce the number of Legendre coefficients to n_l
    g_tau_rebinned = rebinning_tau(g_tau, n_tau)
    g_l_cut = cut_coefficients(g_l, n_l)

    # Set the weight function S to a constant (all points of G_tau are equally important)
    S_tau = g_tau_rebinned.copy()
    S_tau.data[:] = 1.0

    S_l = g_l_cut.copy()
    S_l.data[:] = 1.0

    # Construct a SOM object
    #cont = Som(g_tau_rebinned, S_tau, kind = "FermionGf")
    cont = Som(g_l_cut, S_l, kind="FermionGf")

    # Run!
    cont.run(**run_params)

    # Create a real frequency GF obtained with SOM
    g_w = GfReFreq(window=energy_window, n_points=n_w, indices=[0])
    g_w << cont

    # G(\tau) reconstructed from the SOM solution
    g_rec_tau = g_tau_rebinned.copy()
    g_rec_tau << cont

    # On master node, save results to an archive
    if mpi.is_master_node():
        with HDFArchive(output_filename, 'w') as Results:
            Results['g_rec_tau'] = g_rec_tau
            Results['g_w'] = g_w
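Reading the continuation results back is symmetric; a minimal sketch, assuming the output file written above:

with HDFArchive("som_results.h5", 'r') as Results:
    g_w = Results['g_w']              # real-frequency Green's function from SOM
    g_rec_tau = Results['g_rec_tau']  # reconstructed G(tau) for quality control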
Example #10
def load_from_QMC(filename):
    with HDFArchive(filename, 'r') as QMC:
        G_tau = QMC["G_tau"]
        G_iw = QMC["G_iw"]
        G_l = QMC["G_l"]
        rho = QMC["rho"]
        k_histogram = QMC["k_histogram"]
        average_sign = QMC["average_sign"]
    return G_tau, G_iw, G_l, rho, k_histogram, average_sign
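A hypothetical usage line, matching the archive layout written by SIAM() in Example #4:

G_tau, G_iw, G_l, rho, k_histogram, average_sign = load_from_QMC("qmc_results.h5")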
Example #11
def load_from_QMC(filename, l_max=10, n_samples=16):
    rho = []
    with HDFArchive(filename, 'r') as QMC:  # open the archive once for all reads
        for l in range(l_max + 1):
            rho.append([])
            for i in range(n_samples):
                rho[-1].append(make_diagonal_rho(QMC["rho_l{}_i{}".format(l, i)]))
    return rho
Example #12
def make_calc(beta=2.0, h_field=0.0):
    
    # ------------------------------------------------------------------
    # -- Hubbard atom with two bath sites, Hamiltonian

    p = ParameterCollection(
        beta = beta,
        h_field = h_field,
        U = 5.0,
        ntau = 40,
        niw = 15,
        )

    p.mu = 0.5*p.U
    
    # ------------------------------------------------------------------

    print '--> Solving SIAM with parameters'
    print p
    
    # ------------------------------------------------------------------

    up, do = 'up', 'dn'
    docc = c_dag(up,0) * c(up,0) * c_dag(do,0) * c(do,0)
    mA = c_dag(up,0) * c(up,0) - c_dag(do,0) * c(do,0)
    nA = c_dag(up,0) * c(up,0) + c_dag(do,0) * c(do,0)

    p.H = -p.mu * nA + p.U * docc + p.h_field * mA
    
    # ------------------------------------------------------------------

    fundamental_operators = [c(up,0), c(do,0)]
    
    ed = TriqsExactDiagonalization(p.H, fundamental_operators, p.beta)

    g_tau = GfImTime(beta=beta, statistic='Fermion', n_points=40, indices=[0])
    g_iw = GfImFreq(beta=beta, statistic='Fermion', n_points=10, indices=[0])

    p.G_tau = BlockGf(name_list=[up,do], block_list=[g_tau]*2, make_copies=True)
    p.G_iw = BlockGf(name_list=[up,do], block_list=[g_iw]*2, make_copies=True)
    
    ed.set_g2_tau(p.G_tau[up], c(up,0), c_dag(up,0))
    ed.set_g2_tau(p.G_tau[do], c(do,0), c_dag(do,0))

    ed.set_g2_iwn(p.G_iw[up], c(up,0), c_dag(up,0))
    ed.set_g2_iwn(p.G_iw[do], c(do,0), c_dag(do,0))

    p.magnetization = ed.get_expectation_value(0.5 * mA)
    p.magnetization2 = ed.get_expectation_value(0.25 * mA * mA)
    
    # ------------------------------------------------------------------
    # -- Store to hdf5
    
    filename = 'data_pyed_h_field_%4.4f.h5' % h_field
    with HDFArchive(filename,'w') as res:
        res['p'] = p
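A sketch of a field sweep that would feed calc_field() in Example #7; the h_field grid is an assumption:

# Hypothetical sweep: one archive per field value, picked up later by glob()
for h_field in [0.0, 0.001, 0.002, 0.004, 0.008]:
    make_calc(beta=2.0, h_field=h_field)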
Example #13
def calc_dynamic(plot=True):

    filenames = glob.glob('data_cthyb*.h5')
    if len(filenames) != 1: return
    filename = filenames[0]

    print '--> Loading:', filename
    with HDFArchive(filename, 'r') as s:
        p = s['p']

    p.chi_m = p.G2_iw_ph[('up', 'up')] - p.G2_iw_ph[('up', 'do')]
    p.chi = np.sum(p.chi_m.data) / p.beta**2

    with HDFArchive(filename, 'w') as s:
        s['p'] = p

    print 'beta, chi =', p.beta, p.chi

    if plot: plot_dynamic(p)
Example #14
def calc_field(plot=True):

    filenames = glob.glob('data_pyed_h_field*.h5')

    out = ParameterCollection(data=[])
    h_vec, m_vec, m_ref_vec = [], [], []

    for filename in filenames:

        print '--> Loading:', filename

        with HDFArchive(filename, 'r') as s:
            p = s['p']
            out.data.append(p)
            h_vec.append(p.h_field)

            m = 0.5 * (-p.G_tau['up'](p.beta) + p.G_tau['dn'](p.beta))
            m_vec.append(np.squeeze(m))

            m_ref_vec.append(p.magnetization)

    h_vec, m_vec, m_ref_vec = np.array(h_vec), np.array(m_vec), np.array(m_ref_vec)
    sidx = np.argsort(h_vec)
    out.h_vec, out.m_vec, out.m_ref_vec = h_vec[sidx], m_vec[sidx], m_ref_vec[sidx]

    from scipy.interpolate import InterpolatedUnivariateSpline as IUS

    spl = IUS(out.h_vec, out.m_ref_vec)
    out.chi = -spl(0, nu=1)  # Linear response
    out.beta = p.beta

    print 'beta, chi =', out.beta, out.chi

    filename_out = 'data_pyed_extrap_h_field_beta%6.6f.h5' % out.beta
    with HDFArchive(filename_out, 'w') as s:
        s['field'] = out

    if plot: plot_field(out)
Example #15
def DMFT(U, e_d, t, beta, filename="dmft_results.h5"):
    # Construct the CT-HYB-QMC solver
    S = Solver(beta=beta, gf_struct={'up': [0], 'down': [0]}, n_l=50)

    # Initialize Delta
    Delta = GfImFreq(beta=beta, indices=[0])
    Delta << t**2 * SemiCircular(half_bandwidth=2 * t)

    # Now do the DMFT loop
    n_iter = 8
    for iter in range(n_iter):

        # Compute new S.G0_iw
        for name, g0 in S.G0_iw:
            g0 << inverse(iOmega_n - e_d - Delta)
        # Run the solver
        S.solve(
            h_int=U * n('up', 0) * n('down', 0),  # Local Hamiltonian
            n_cycles=200000,  # Number of QMC cycles
            length_cycle=50,  # Length of a cycle
            n_warmup_cycles=2000,  # How many warmup cycles
            measure_g_l=True)
        # Compute new Delta with the self-consistency condition while imposing paramagnetism
        g_l = (S.G_l['up'] + S.G_l['down']) / 2.
        Delta.set_from_legendre(t**2 * g_l)

        # Intermediate saves
        if mpi.is_master_node():
            with HDFArchive(filename) as Results:
                Results["G_tau_iter{}".format(iter)] = S.G_tau
                Results["G_iw_iter{}".format(iter)] = S.G_iw
                Results["G_l_iter{}".format(iter)] = S.G_l
                Results["Sigma_iter{}".format(iter)] = S.Sigma_iw

    if mpi.is_master_node():
        with HDFArchive(filename) as Results:
            Results["G_tau"] = S.G_tau
            Results["G_iw"] = S.G_iw
            Results["G_l"] = S.G_l
            Results["Sigma"] = S.Sigma_iw
Example #16
    def change_dataset(self, *args):
        self.locked1 = True
        self.quantities = []
        if self.pickle_mode:
            with open(self.h5_file, 'r') as fi:
                self._result = _get_path(self.dataset.get(), pickle.load(fi))
        else:
            with HDFArchive(self.h5_file, 'r') as arx:
                self._result = _get_path(self.dataset.get(), arx)

        for function in dir(self._result):
            if not function.startswith("plot_"):
                continue
            # if there is an error with getting the data
            # we do not want to offer it in the menu
            try:
                getattr(self._result, function).original(self._result)
                self.quantities.append(function[5:])
            except Exception as e:
                print(e)
                pass

        if not hasattr(self._result, 'analyzer_results'):
            ar = []
        elif self._result.matrix_structure is not None and self._result.element_wise:
            m = product(
                *map(range, self._result.effective_matrix_structure))
            ar = [(i, self._get_ar_i(i)) for i in m]
        else:
            ar = [(None, self._result.analyzer_results)]
        for ia, a in ar:
            for key, analyzer in a.iteritems():
                for function in dir(analyzer):
                    if not function.startswith("plot_"):
                        continue
                    # if there is an error with getting the data
                    # we do not want to offer it in the menu
                    try:
                        getattr(analyzer, function).\
                            original(analyzer, self._result, element=ia)
                        ky = key + ': ' + function[5:]
                        if ky not in self.quantities:
                            self.quantities.append(ky)
                    except:
                        pass

        self.update_quantity_ui()
        self.locked1 = False
        self.update_plot()
Example #17
def read_TarGZ_HDFArchive(filename):

    import tarfile
    from tempfile import NamedTemporaryFile

    tar = tarfile.open(filename, "r:gz")
    f = tar.extractfile(tar.getmembers()[0])

    tmp = NamedTemporaryFile(delete=False)
    tmp.write(f.read())
    tmp.close()

    with HDFArchive(tmp.name, 'r') as res:
        p = res['p']

    os.remove(tmp.name)

    return p
Example #18
def write_TarGZ_HDFArchive(filename, **kwargs):

    import os
    import tarfile
    from pytriqs.archive import HDFArchive

    filename = filename.split('.')[0]
    filename_h5 = filename + '.h5'
    filename_tar = filename + '.tar.gz'

    with HDFArchive(filename_h5, 'w') as res:
        for key, value in kwargs.items():
            res[key] = value

    with tarfile.open(filename_tar, 'w:gz') as tar:
        tar.add(filename_h5)

    os.remove(filename_h5)
Example #19
def read_TarGZ_HDFArchive(filename):

    import os
    import tarfile
    from tempfile import NamedTemporaryFile
    from pytriqs.archive import HDFArchive

    tar = tarfile.open(filename, "r:gz")
    f = tar.extractfile(tar.getmembers()[0])

    tmp = NamedTemporaryFile(delete=False)
    tmp.write(f.read())
    tmp.close()

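    # Note: HDFArchive keeps the HDF5 file open, so on POSIX systems the data
    # stays readable even after the temporary file is unlinked below.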
    data = HDFArchive(tmp.name, 'r')

    os.remove(tmp.name)

    return data
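A minimal round trip combining the writer from Example #18 with the reader above; file name and payload are illustrative:

write_TarGZ_HDFArchive('data_test.tar.gz', beta=10.0, U=1.0)
data = read_TarGZ_HDFArchive('data_test.tar.gz')
print data['beta'], data['U']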
Example #20
    def test_ortho(self):
        self.proj_gr.orthogonalize()

        dens_mat, overl = self.proj_sh.density_matrix(self.el_struct)

#        testout = _rpath + 'projortho.out.test'
#        with open(testout, 'wt') as f:
#            f.write("density matrix: %s\n"%(dens_mat))
#            f.write("overlap matrix: %s\n"%(overl))
        testout = _rpath + 'projortho.test.h5'
        with HDFArchive(testout, 'w') as h5test:
            h5test['density_matrix'] = dens_mat
            h5test['overlap_matrix'] = overl

# FIXME: seems redundant, as 'overl' is written to the file anyway
        self.assertEqual(overl, np.eye(5))

#        expected_file = _rpath + 'projortho.out'
        expected_file = _rpath + 'projortho.out.h5'
#        self.assertFileEqual(testout, expected_file)
        self.assertH5FileEqual(testout, expected_file)
Example #21
def initialize_outputfile(iter_):

    if world.Get_rank() == 0:
        if iter_ >= 1000:
            print 'too many iterations...'
            exit()

        filename = data_folder + "/iteration_%03d.h5" % iter_
        print 'filename', filename
        results = HDFArchive(filename, 'w')

        # Store the source of the calling script alongside the data,
        # so every result file records how it was produced
        import inspect
        import __main__
        source = inspect.getsource(__main__)
        results["source_file"] = source

        return results
Example #22
    def test_ortho_normion(self):
        self.proj_gr.normion = True
        self.proj_gr.orthogonalize()

        dens_mat, overl = self.proj_sh.density_matrix(self.el_struct)

        #        testout = _rpath + 'projortho_normion.out.test'
        #        with open(testout, 'wt') as f:
        #            f.write("density matrix: %s\n"%(dens_mat))
        #            f.write("overlap matrix: %s\n"%(overl))
        testout = _rpath + 'projortho_normion.test.h5'
        with HDFArchive(testout, 'w') as h5test:
            h5test['density_matrix'] = dens_mat
            h5test['overlap_matrix'] = overl

# FIXME: redundant
        self.assertEqual(overl[0, 0, ...], np.eye(5))
        self.assertEqual(overl[0, 1, ...], np.eye(5))

        #        expected_file = _rpath + 'projortho_normion.out'
        #        self.assertFileEqual(testout, expected_file)
        expected_file = _rpath + 'projortho_normion.out.h5'
        self.assertH5FileEqual(testout, expected_file)
Example #23
def make_calc(beta=2.0, nwf=8):
    
    # ------------------------------------------------------------------
    # -- Hubbard atom with two bath sites, Hamiltonian

    p = ParameterCollection(
        beta = beta,
        U = 5.0,
        nw = 1,
        nwf = nwf,
        nwf_gf = 2*nwf,
        )

    ana = analytic_hubbard_atom(**p.dict())

    p.chi = np.sum(ana.chi_m.data) / p.beta**2
    
    # ------------------------------------------------------------------
    # -- Store to hdf5
    
    filename = 'data_dynamic_beta%6.6f_nwf%i.h5' % (p.beta, p.nwf)
    with HDFArchive(filename,'w') as res:
        res['p'] = p
Example #24
    def get_datasets(self, ar=None, path=''):
        ret = []
        is_dir = False
        if ar is None:
            if self.pickle_mode:
                with open(self.h5_file, 'r') as fi:
                    return self.get_datasets(pickle.load(fi), path)
            else:
                with HDFArchive(self.h5_file, 'r') as ar:
                    return self.get_datasets(ar, path)
        # we detect whether it is a dataset directory
        if isinstance(ar, MaxEntResultData):
            is_dir = True
            if len(path) == 0:
                path = '/'
            ret.append(path)
            return ret
        for key in ar:
            try:
                ret += self.get_datasets(ar[key], path + '/' + key)
            except:
                pass
        return ret
Example #25
import numpy as np
import matplotlib.pyplot as plt

# ----------------------------------------------------------------------

from pytriqs.archive import HDFArchive
from pytriqs.gf import MeshBrillouinZone

# ----------------------------------------------------------------------
if __name__ == '__main__':

    filename = 'data_e_k_and_chi00_wk.h5'

    with HDFArchive(filename, 'r') as arch:
        e_k = arch['e_k']

    k = np.linspace(-0.5, 0.5, num=200) * 2. * np.pi
    Kx, Ky = np.meshgrid(k, k)

    e_k_interp = np.vectorize(lambda kx, ky: e_k([kx, ky, 0])[0, 0].real)
    e_k_interp = e_k_interp(Kx, Ky)

    plt.imshow(
        e_k_interp,
        cmap=plt.get_cmap('RdBu'),
        extent=(k.min(), k.max(), k.min(), k.max()),
        origin='lower',
    )
    plt.colorbar()

    plt.contour(Kx, Ky, e_k_interp, levels=[0])
    plt.show()
Example #26
# This plotting script is largely based on work by Malte Harland
# [email protected]

from pytriqs.gf.local import *
from pytriqs.gf.local.descriptors import *
from pytriqs.archive import HDFArchive
from pytriqs.statistics.histograms import Histogram
from matplotlib import pyplot as plt
from matplotlib.backends.backend_pdf import PdfPages
import numpy as np
from scipy.integrate import quad

from dos import A, e_min, e_max, e_low

arch = HDFArchive('triangles.h5','r')
pp = PdfPages('triangles.pdf')

abs_errors = arch['abs_errors']

def make_ref(g):
    print "Norm of the reference DOS:", quad(A, e_min, e_max, limit=100)[0].real
    g << Function(lambda w: -1j*np.pi * A(w.real))

def plot_A_w(g_w, g_w_ref, fig):
    w_mesh = [w for w in g_w.mesh]

    ax = fig.add_axes([.1,.54,.55,.4])
    ax.plot(w_mesh, -g_w.data[:,0,0].imag/np.pi,
            color = 'red', linewidth = 0.6, label = 'SOM')
    ax.plot(w_mesh, -g_w_ref.data[:,0,0].imag/np.pi,
            color = 'blue', linewidth = 0.6, linestyle='dashed', label = 'reference')
Example #27
    def run_dmft_loops(self, n_dmft_loops = 1):
        """runs the DMFT calculation"""
        clp = p = CleanLoopParameters(self.get_parameters())
        report = Reporter(**clp)
        report('Parameters:', clp)
        scheme = Scheme(**clp)
        dmft = DMFTObjects(**clp)
        raw_dmft = DMFTObjects(**clp)
        g_0_c_iw, g_c_iw, sigma_c_iw, dmu = dmft.get_dmft_objs() # ref, ref, ref, value

        report('Initializing...')
        if clp['nambu']:
            transf = NambuTransformation(**clp)
            scheme.set_pretransf(transf.pretransformation(), transf.pretransformation_inverse())
            raw_transf = NambuTransformation(**clp)
        else:
            transf = ClustersiteTransformation(g_loc = scheme.g_local(sigma_c_iw, dmu), **clp)
            clp.update({'g_transf_struct': transf.get_g_struct()})
            raw_transf = ClustersiteTransformation(**clp)
        
        transf.set_hamiltonian(**clp)
        report('Transformation ready')
        report('New basis:', transf.get_g_struct())
        impurity = Solver(beta = clp['beta'], gf_struct = dict(transf.get_g_struct()), 
                          n_tau = clp['n_tau'], n_iw = clp['n_iw'], n_l = clp['n_legendre'])
        impurity.Delta_tau.name = '$\\tilde{\\Delta}_c$'
        rnames = random_generator_names_list()
        report('H = ', transf.hamiltonian)
        report('Impurity solver ready')
        report('')

        for loop_nr in range(self.next_loop(), self.next_loop() + n_dmft_loops):
            report('DMFT-loop nr. %s'%loop_nr)
            if mpi.is_master_node(): duration = time()

            report('Calculating dmu...')
            dmft.find_dmu(scheme, **clp)
            g_0_c_iw, g_c_iw, sigma_c_iw, dmu = dmft.get_dmft_objs()
            report('dmu = %s'%dmu)

            report('Calculating local Green function...')
            g_c_iw << scheme.g_local(sigma_c_iw, dmu)
            g_c_iw << addExtField(g_c_iw, p['ext_field'])
            if mpi.is_master_node() and p['verbosity'] > 1: checksym_plot(g_c_iw, p['archive'][0:-3] + 'Gchecksym' + str(loop_nr) + '.pdf')
            report('Calculating Weiss-field...')
            g_0_c_iw << inverse(inverse(g_c_iw) + sigma_c_iw)

            dmft.make_g_0_iw_with_delta_tau_real()

            report('Changing basis...')
            transf.set_dmft_objs(*dmft.get_dmft_objs())
            if mpi.is_master_node() and p['verbosity'] > 1: 
                checktransf_plot(transf.get_g_iw(), p['archive'][0:-3] + 'Gchecktransf' + str(loop_nr) + '.pdf')
                checktransf_plot(g_0_c_iw, p['archive'][0:-3] + 'Gweisscheck' + str(loop_nr) + '.pdf')
                checksym_plot(inverse(transf.g_0_iw), p['archive'][0:-3] + 'invGweisscheckconst' + str(loop_nr) + '.pdf')
                #checksym_plot(inverse(transf.get_g_iw()), p['archive'][0:-3] + 'invGsymcheckconst' + str(loop_nr) + '.pdf')

            if not clp['random_name']: clp.update({'random_name': rnames[int((loop_nr + mpi.rank) % len(rnames))]}) # TODO move
            if not clp['random_seed']: clp.update({'random_seed': 862379 * mpi.rank + 12563 * self.next_loop()})
            impurity.G0_iw << transf.get_g_0_iw()
            report('Solving impurity problem...')
            mpi.barrier()
            impurity.solve(h_int = transf.get_hamiltonian(), **clp.get_cthyb_parameters())

            if mpi.is_master_node() and p['verbosity'] > 1:
                checksym_plot(inverse(impurity.G0_iw), p['archive'][0:-3] + 'invGweisscheckconstsolver' + str(loop_nr) + '.pdf')
            report('Postprocessing measurements...')
            if clp['measure_g_l']:
                for ind, g in transf.get_g_iw(): g  << LegendreToMatsubara(impurity.G_l[ind])
            else:
                for ind, g in transf.get_g_iw(): g.set_from_fourier(impurity.G_tau[ind])
            raw_transf.set_dmft_objs(transf.get_g_0_iw(),
                                     transf.get_g_iw(),
                                     inverse(transf.get_g_0_iw()) - inverse(transf.get_g_iw()))
            if clp['measure_g_tau'] and clp['fit_tail']:
                for ind, g in transf.get_g_iw():
                    for tind in transf.get_g_struct():
                        if tind[0] == ind: block_inds = tind[1]
                    fixed_moments = TailGf(len(block_inds), len(block_inds), 1, 1)
                    fixed_moments[1] = identity(len(block_inds))
                    g.fit_tail(fixed_moments, 3, clp['tail_start'], clp['n_iw'] - 1)
            if mpi.is_master_node() and p['verbosity'] > 1: checksym_plot(inverse(transf.get_g_iw()), p['archive'][0:-3] + 'invGsymcheckconstsolver' + str(loop_nr) + '.pdf')
            report('Backtransforming...')
            transf.set_sigma_iw(inverse(transf.get_g_0_iw()) - inverse(transf.get_g_iw()))
            dmft.set_dmft_objs(*transf.get_backtransformed_dmft_objs())
            dmft.set_dmu(dmu)
            raw_dmft.set_dmft_objs(*raw_transf.get_backtransformed_dmft_objs())
            raw_dmft.set_dmu(dmu)

            if clp['mix']: dmft.mix()
            if clp['impose_paramagnetism']: dmft.paramagnetic()
            if clp['impose_afm']: dmft.afm()
            if clp['site_symmetries']: dmft.site_symmetric(clp['site_symmetries'])
            density = scheme.apply_pretransf_inv(dmft.get_g_iw(), True).total_density()

            report('Saving results...')
            if mpi.is_master_node():
                a = HDFArchive(p['archive'], 'a')
                if not a.is_group('results'):
                    a.create_group('results')
                a_r = a['results']
                a_r.create_group(str(loop_nr))
                a_l = a_r[str(loop_nr)]
                a_l['g_c_iw'] = dmft.get_g_iw()
                a_l['g_c_iw_raw'] = raw_dmft.get_g_iw()
                a_l['g_transf_iw'] = transf.get_g_iw()
                a_l['g_transf_iw_raw'] = raw_transf.get_g_iw()
                a_l['sigma_c_iw'] = dmft.get_sigma_iw()
                a_l['sigma_c_iw_raw'] = raw_dmft.get_sigma_iw()
                a_l['sigma_transf_iw'] = transf.get_sigma_iw()
                a_l['sigma_transf_iw_raw'] = raw_transf.get_sigma_iw()
                a_l['g_0_c_iw'] = dmft.get_g_0_iw()
                a_l['g_0_c_iw_raw'] = raw_dmft.get_g_0_iw()
                a_l['g_0_transf_iw'] = transf.get_g_0_iw()
                a_l['g_0_transf_iw_raw'] = raw_transf.get_g_0_iw()
                a_l['dmu'] = dmft.get_dmu()
                a_l['density'] = density
                a_l['loop_time'] = {'seconds': time() - duration,
                                    'hours': (time() - duration)/3600., 
                                    'days': (time() - duration)/3600./24.}
                a_l['n_cpu'] = mpi.size
                a_l['cdmft_code_version'] = CDmft._version
                clp_dict = dict()
                clp_dict.update(clp)
                a_l['parameters'] = clp_dict
                a_l['triqs_code_version'] = version

                a_l['delta_transf_tau'] = impurity.Delta_tau
                if clp['measure_g_l']: a_l['g_transf_l'] = impurity.G_l
                if clp['measure_g_tau']: a_l['g_transf_tau'] = impurity.G_tau
                a_l['sign'] = impurity.average_sign
                if clp['measure_density_matrix']: a_l['density_matrix'] = impurity.density_matrix
                a_l['g_atomic_tau'] = impurity.atomic_gf
                a_l['h_loc_diagonalization'] = impurity.h_loc_diagonalization
                if a_r.is_data('n_dmft_loops'):
                    a_r['n_dmft_loops'] += 1
                else:
                    a_r['n_dmft_loops'] = 1
                del a_l, a_r, a
            report('Loop done')
            report('')
            mpi.barrier()
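Reading one loop's results back follows the layout written above; a minimal sketch (the archive file name is an assumption):

with HDFArchive('cdmft.h5', 'r') as a:
    n_loops = a['results']['n_dmft_loops']
    last = a['results'][str(n_loops - 1)]   # results of the latest loop
    print 'loops done:', n_loops, ' density:', last['density']
    g_c_iw = last['g_c_iw']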
Example #28
# Solver parameters
p = {}
p["max_time"] = -1
p["length_cycle"] = 50
p["n_warmup_cycles"] = 50
p["n_cycles"] = 5000

Converter = Wien2kConverter(filename=dft_filename, repacking=True)
Converter.convert_dft_input()
mpi.barrier()

previous_runs = 0
previous_present = False
if mpi.is_master_node():
    f = HDFArchive(dft_filename+'.h5','a')
    if 'dmft_output' in f:
        ar = f['dmft_output']
        if 'iterations' in ar:
            previous_present = True
            previous_runs = ar['iterations']
    else:
        f.create_group('dmft_output')
    del f
previous_runs    = mpi.bcast(previous_runs)
previous_present = mpi.bcast(previous_present)

SK=SumkDFT(hdf_file=dft_filename+'.h5',use_dft_blocks=use_blocks,h_field=h_field)

n_orb = SK.corr_shells[0]['dim']
l = SK.corr_shells[0]['l']
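After each converged iteration, the counter read above would be updated on the master node; a hedged sketch of that bookkeeping step:

# Hypothetical end-of-iteration update, mirroring the restart logic above
if mpi.is_master_node():
    with HDFArchive(dft_filename + '.h5', 'a') as f:
        f['dmft_output']['iterations'] = previous_runs + 1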
Example #29
def make_calc():

    # ------------------------------------------------------------------
    # -- Hamiltonian

    p = ParameterCollection(
        beta = 0.5,
        U = 0.5,
        nw = 1,
        nwf = 15,
        V = 1.0,
        eps = 0.2,
        )

    p.nwf_gf = 4 * p.nwf
    p.mu = 0.5*p.U

    # ------------------------------------------------------------------

    ca_up, cc_up = c('0', 0), c_dag('0', 0)
    ca_do, cc_do = c('0', 1), c_dag('0', 1)

    ca0_up, cc0_up = c('1', 0), c_dag('1', 0)
    ca0_do, cc0_do = c('1', 1), c_dag('1', 1)

    docc = cc_up * ca_up * cc_do * ca_do
    nA = cc_up * ca_up + cc_do * ca_do
    hybridiz = p.V * (cc0_up * ca_up + cc_up * ca0_up + cc0_do * ca_do + cc_do * ca0_do)
    bath_lvl = p.eps * (cc0_up * ca0_up + cc0_do * ca0_do)

    p.H_int = p.U * docc
    p.H = -p.mu * nA + p.H_int + hybridiz + bath_lvl

    # ------------------------------------------------------------------
    # -- Exact diagonalization

    # Conversion from TRIQS to Pomerol notation for operator indices
    # TRIQS:   block_name, inner_index
    # Pomerol: site_label, orbital_index, spin_name
    index_converter = {
        ('0', 0) : ('loc', 0, 'up'),
        ('0', 1) : ('loc', 0, 'down'),
        ('1', 0) : ('loc', 1, 'up'),
        ('1', 1) : ('loc', 1, 'down'),
        }

    # -- Create Exact Diagonalization instance
    ed = PomerolED(index_converter, verbose=True)
    ed.diagonalize(p.H) # -- Diagonalize H

    p.gf_struct = [['0', [0, 1]]]

    # -- Single-particle Green's functions
    p.G_iw = ed.G_iw(p.gf_struct, p.beta, n_iw=p.nwf_gf)['0']

    # -- Particle-particle two-particle Matsubara frequency Green's function
    opt = dict(
        beta=p.beta, gf_struct=p.gf_struct,
        blocks=set([("0", "0")]),
        n_iw=p.nw, n_inu=p.nwf)

    p.G2_iw_ph = ed.G2_iw_inu_inup(channel='PH', **opt)[('0', '0')]

    filename = 'data_pomerol.h5'
    with HDFArchive(filename,'w') as res:
        res['p'] = p

    import os
    os.system('tar czvf data_pomerol.tar.gz data_pomerol.h5')
    os.remove('data_pomerol.h5')
Example #30
p["n_cycles"] = 1000000
p["perform_tail_fit"] = True
p["fit_max_moments"] = 4
p["fit_min_n"] = 30
p["fit_max_n"] = 60

# If the conversion step was not done, we could do it here. Uncomment the lines if you want to do this.
#from pytriqs.applications.dft.converters.wien2k_converter import *
#Converter = Wien2kConverter(filename=dft_filename, repacking=True)
#Converter.convert_dft_input()
#mpi.barrier()

previous_runs = 0
previous_present = False
if mpi.is_master_node():
    f = HDFArchive(dft_filename+'.h5','a')
    if 'dmft_output' in f:
        ar = f['dmft_output']
        if 'iterations' in ar:
            previous_present = True
            previous_runs = ar['iterations']
    else:
        f.create_group('dmft_output')
    del f
previous_runs    = mpi.bcast(previous_runs)
previous_present = mpi.bcast(previous_present)

SK=SumkDFT(hdf_file=dft_filename+'.h5',use_dft_blocks=use_blocks,h_field=h_field)

n_orb = SK.corr_shells[0]['dim']
l = SK.corr_shells[0]['l']
Example #31
gm_flip_spins_all = {'flip_spins_all' : {mkind("up",1) : mkind("dn",1), mkind("dn",1) : mkind("up",1),
                                         mkind("up",2) : mkind("dn",2), mkind("dn",2) : mkind("up",2)}}
gm_swap_atoms     = {'swap_atoms' :     {mkind("up",1) : mkind("up",2), mkind("dn",1) : mkind("dn",2),
                                         mkind("up",2) : mkind("up",1), mkind("dn",2) : mkind("dn",1)}}

# Construct the solver
S = SolverCore(beta=beta, gf_struct=gf_struct, n_tau=n_tau, n_iw=n_iw)

# Set hybridization function
delta_w = GfImFreq(indices = [1,2], beta=beta)
delta_w << (V**2)*(inverse(iOmega_n - epsilon) + inverse(iOmega_n + epsilon))
for sn in spin_names:
    S.G0_iw[sn] << inverse(iOmega_n - np.matrix([[-mu,t],[t,-mu]]) - delta_w)

if mpi.is_master_node():
    arch = HDFArchive("move_global_beta%.0f_prob%.2f.h5" %(beta,move_global_prob) ,'w')
    arch['beta'] = beta
    arch['move_global_prob'] = move_global_prob

static_observables = {"N1_up" : n(*mkind("up",1)), "N1_dn" : n(*mkind("dn",1)),
                      "N2_up" : n(*mkind("up",2)), "N2_dn" : n(*mkind("dn",2))}

global_moves = [('none',{}),
                ('flip_spins_1',gm_flip_spins_1),
                ('flip_spins_all',gm_flip_spins_all),
                ('swap_atoms',gm_swap_atoms),
                ('swap_and_flip',dict(gm_flip_spins_all,**gm_swap_atoms))]

for gm_name, gm in global_moves:
    mpi.report("Running with global moves set '%s'" % gm_name)
    if gm_name != 'none':
Example #32
from pytriqs.gf.local import *
from pytriqs.gf.local.descriptors import iOmega_n
from pytriqs.plot.mpl_interface import oplot
from pytriqs.archive import HDFArchive

g = GfImFreq(indices = [1], beta = 300, n_points = 1000, name = "g")
g << inverse( iOmega_n + 0.5 )
oplot( g, '-o', x_window = (0,3) )

R = HDFArchive('myfile.h5', 'r')

for n, calculation in R.items():
    #g = calculation['g']
    g << inverse( iOmega_n + 0.5 )

    X, Y = g.x_data_view( x_window = (0,0.2), flatten_y = True )

    #fitl = Fit( X, Y.imag, linear )
    oplot( g, '-o', x_window = (0,3) )
Example #33
run_params = {'energy_window' : (-4.0,7.0)}
run_params['verbosity'] = 3
run_params['adjust_f'] = True
run_params['adjust_l'] = True
run_params['t'] = 500
run_params['f'] = 500
run_params['l'] = 150
run_params['make_histograms'] = True

g_tau = GfImTime(beta = beta, n_points = n_tau, indices = indices)
g_w = GfReFreq(window = run_params['energy_window'], n_points = n_w, indices = indices)
S_tau = g_tau.copy()
g_tau_rec = g_tau.copy()

if mpi.is_master_node():
    arch = HDFArchive('triangles.h5','w')
    arch['abs_errors'] = abs_error

for s in abs_error:
    if mpi.is_master_node():
        make_g_tau(g_tau)
        g_tau.data[:] += s * 2*(np.random.rand(*g_tau.data.shape) - 0.5)

    g_tau = mpi.bcast(g_tau)
    S_tau.data[:] = 1.0

    if mpi.is_master_node():
        gr_name = 'abs_error_%.4f' % s
        arch.create_group(gr_name)
        abs_err_gr = arch[gr_name]
Example #34
qn = [n("up",0),n("dn",0)]
p["partition_method"] = "quantum_numbers"
p["quantum_numbers"] = qn

# Construct solver
S = SolverCore(beta=beta, gf_struct=gf_struct, n_tau=n_tau, n_iw=n_iw)

def read_histo(f,type_of_col_1):
    histo = []
    for line in f:
        cols = filter(lambda s: s, line.split(' '))
        histo.append((type_of_col_1(cols[0]),float(cols[1]),float(cols[2])))
    return histo

if mpi.is_master_node():
    arch = HDFArchive('asymm_bath.h5','w')

# Set hybridization function
for e in epsilon:
    delta_w = GfImFreq(indices = [0], beta=beta)
    delta_w << (V**2) * inverse(iOmega_n - e)

    S.G0_iw["up"] << inverse(iOmega_n - ed - delta_w)
    S.G0_iw["dn"] << inverse(iOmega_n - ed - delta_w)

    S.solve(h_int=H, **p)

    if mpi.is_master_node():
        arch.create_group('epsilon_' + str(e))
        gr = arch['epsilon_' + str(e)]
        gr['G_tau'] = S.G_tau
Example #35
# Now split using the total number of particles, N = N_up + N_dn
ad = AtomDiag(H, fops, [N_up + N_dn])
print ad.n_subspaces  # 7

# Split the Hilbert space automatically
ad = AtomDiag(H, fops)
print ad.n_subspaces  # 28

# Partition function for inverse temperature \beta=3
beta = 3
print partition_function(ad, beta)

# Equilibrium density matrix
dm = atomic_density_matrix(ad, beta)

# Expectation values of orbital double occupancies
print trace_rho_op(dm, n('up', 0) * n('dn', 0), ad)
print trace_rho_op(dm, n('up', 1) * n('dn', 1), ad)
print trace_rho_op(dm, n('up', 2) * n('dn', 2), ad)

# Atomic Green's functions
gf_struct = [['dn', orb_names], ['up', orb_names]]
G_w = atomic_g_w(ad, beta, gf_struct, (-2, 2), 400, 0.01)
G_tau = atomic_g_tau(ad, beta, gf_struct, 400)
G_iw = atomic_g_iw(ad, beta, gf_struct, 100)
G_l = atomic_g_l(ad, beta, gf_struct, 20)

# Finally, we save our AtomDiag object for later use
with HDFArchive('atom_diag_example.h5') as ar:
    ar['ad'] = ad
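The stored object can be recovered later without re-diagonalizing; a minimal read-back sketch:

with HDFArchive('atom_diag_example.h5', 'r') as ar:
    ad = ar['ad']
print partition_function(ad, beta)  # same value as computed above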
Example #36
from pytriqs.gf.local import GfReFreq
from pytriqs.archive import HDFArchive
from math import pi

R = HDFArchive('myfile.h5', 'r') 
 
from pytriqs.plot.mpl_interface import oplot, plt

for name, g in R.items() :  # iterate on the elements of R, like a dict ...
    oplot( (- 1/pi * g).imag, "-o", name = name)

plt.xlim(-1,1) 
plt.ylim(0,7) 

plt.savefig("./tut_ex3b.png")

Example #37
    # ------------------------------------------------------------------
    # -- Collect results

    d.Sigma_iw = solv.Sigma_iw['up']
    d.G_tau = solv.G_tau['up']
    d.G_l = solv.G_l['up']
    d.G0_iw = solv.G0_iw['up']

    d.G_iw = G_iw['up']
    d.Gl_tau = G_tau['up']

    d.runtime = runtime
    d.G2_tau = solv.g4_tau[('up', 'do')]
    d.G2_iw = solv.g4_iw[('up', 'do')]
    d.G2_iw_pp = solv.g4_iw_pp[('up', 'do')]
    d.G2_iw_ph = solv.g4_iw_ph[('up', 'do')]

    d.mpi_size = mpi.size

    d.perturbation_order = solv.perturbation_order
    d.perturbation_order_total = solv.perturbation_order_total

    # ------------------------------------------------------------------
    # -- Store results
    if mpi.is_master_node():
        filename = 'data_cthyb.h5'
        with HDFArchive(filename, 'w') as res:
            for key, value in d.__dict__.items():
                res[key] = value
Example #38
# Function to extract the density for a given mu; used by the dichotomy
# routine that determines mu
def Dens(mu):
    dens = SK(mu=mu, Sigma=Sigma_lat).total_density()
    if abs(dens.imag) > 1e-20:
        mpi.report(
            "Warning: Imaginary part of density will be ignored ({})".format(
                str(abs(dens.imag))))
    return dens.real


#check if there are previous runs in the outfile and if so restart from there
previous_runs = 0
previous_present = False
mu = 0.
if mpi.is_master_node():
    ar = HDFArchive(outfile + '.h5', 'a')
    if 'iterations' in ar:
        previous_present = True
        previous_runs = ar['iterations']
        S.Sigma_iw = ar['Sigma_iw']
        mu = ar['mu-%d' % previous_runs]
    del ar
previous_runs = mpi.bcast(previous_runs)
previous_present = mpi.bcast(previous_present)
S.Sigma_iw = mpi.bcast(S.Sigma_iw)
mu = mpi.bcast(mu)

for iteration_number in range(1, nloops + 1):
    it = iteration_number + previous_runs
    if mpi.is_master_node():
        print('-----------------------------------------------')
Example #39
# This plotting script is largely based on work by Malte Harland
# [email protected]

from pytriqs.gf.local import *
from pytriqs.gf.local.descriptors import *
from pytriqs.archive import HDFArchive
from pytriqs.statistics.histograms import Histogram
from matplotlib import pyplot as plt
from matplotlib.backends.backend_pdf import PdfPages
import numpy as np
from scipy.integrate import quad

from dos import A, e_min, e_max, e_th, e_con

arch = HDFArchive('microgap.h5', 'r')
pp = PdfPages('microgap.pdf')

abs_errors = arch['abs_errors']


def make_ref(g):
    print "Norm of the reference DOS:", quad(A, e_min, e_max,
                                             limit=100)[0].real
    g << Function(lambda w: -1j * np.pi * A(w.real))


def plot_A_w(g_w, g_w_ref, fig):
    w_mesh = [w for w in g_w.mesh]

    ax = fig.add_axes([.1, .54, .55, .4])
    ax.plot(w_mesh, -g_w.data[:, 0, 0].imag / np.pi,
            color='red', linewidth=0.6, label='SOM')
    ax.plot(w_mesh, -g_w_ref.data[:, 0, 0].imag / np.pi,
            color='blue', linewidth=0.6, linestyle='dashed', label='reference')
Example #40
    S.G0_iw[bn][i,i] <<= inverse(iOmega_n + mu - atomic_levels[(bn,i)] - delta_w)

    # Dump Delta parameters
    if Delta_dump:
        Delta_dump_file.write(bn + '\t')
        Delta_dump_file.write(str(V) + '\t')
        Delta_dump_file.write(str(e) + '\n')

print_master("Running the simulation...")

# Solve the problem
S.solve(**p)

# Save the results  
if mpi.rank==0:
    Results = HDFArchive(results_file_name,'w')
    for b in gf_struct: Results[b] = S.G_tau[b]

    import pytriqs.applications.impurity_solvers.cthyb.version as version
    import inspect
    import __main__
    Results.create_group("log")
    log = Results["log"]
    log["version"] = version.version
    log["release"] = version.release
    log["triqs_hash"] = version.triqs_hash
    log["cthyb_hash"] = version.cthyb_hash
    log["script"] = inspect.getsource(__main__)
    log["params"] = inspect.getsource(params)
Example #41
File: som.py  Project: krivenko/som
    energy_window = (G_w.mesh.omega_min, G_w.mesh.omega_max)
elif kind == 'BosonCorr' or kind == 'BosonAutoCorr':
    if mesh != 'legendre':
        chi = arch['chi_' + mesh_suffix]
    else:
        chi = GfLegendre(beta = beta, indices = [0,1], n_points = n_l)
        chi << MatsubaraToLegendre(arch['chi_iw'])
    chi_w = arch_ed['chi_w'].copy()
    n_w = len(chi_w.mesh)
    energy_window = (chi_w.mesh.omega_min, chi_w.mesh.omega_max)
else:
    raise RuntimeError('Unknown observable kind '+kind)

som_params['energy_window'] = energy_window

arch_som = HDFArchive(som_filename,'a')

if kind == 'FermionGf':
    G_rec = G.copy()
    G_w = arch_ed['G_w'].copy()
    histograms = {}
    for bn, g_block in G:
        # Construct a SOM object
        cont = Som(g_block, kind = kind)
        # Run!
        cont.run(**som_params)

        G_w[bn] << cont
        G_rec[bn] << cont
        histograms[bn] = cont.histograms
Example #42
# Parameters for Som.run()
run_params = {'energy_window' : energy_window}
# Verbosity level
run_params['verbosity'] = 3
# Number of particular solutions to accumulate
run_params['l'] = 5000
# Number of global updates
run_params['f'] = 100
# Number of local updates per global update
run_params['t'] = 50
# Accumulate histogram of the objective function values
run_params['make_histograms'] = True

# Read \chi(i\omega_n) from archive
# Could be \chi(\tau) or \chi_l as well.
chi_iw = HDFArchive('example.h5', 'r')['chi_iw']

# Set the weight function S to a constant (all points of chi_iw are equally important)
S = chi_iw.copy()
S.data[:] = 1.0

# Estimated norms of spectral functions, (\pi/2) * \chi(i\omega_0)
norms = (numpy.pi/2) * numpy.array([chi_iw(0).real[0,0],
                                    chi_iw(0).real[1,1]])

# Construct a SOM object
cont = Som(chi_iw, S, kind = "BosonAutoCorr", norms = norms)

# Run!
# Takes 2-3 minutes on 4 cores ...
cont.run(**run_params)
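After the run, the accumulated solution would be projected onto a real-frequency container, as in Example #9; the mesh size here is an assumption:

# Hedged continuation: retrieve chi(omega) from the SOM object
chi_w = GfReFreq(window=run_params['energy_window'], n_points=1000, indices=[0, 1])
chi_w << cont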