def check_quantity(file_ref, quantity_name):

    file_new = "data_from_scratch/iteration_000.h5"

    results_ref = HDFArchive(file_ref, 'r')
    results_new = HDFArchive(file_new, 'r')

    quantity_ref = results_ref[quantity_name]
    quantity_new = results_new[quantity_name]

    print('checking quantity', quantity_name, '...')
    assert_block_gfs_are_close(quantity_new, quantity_ref, precision=1e-10)
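
# Hedged usage sketch (an addition, not from the original): compare one
# stored quantity against a reference archive. The file name and quantity
# name below are illustrative assumptions.
check_quantity("data_reference/iteration_000.h5", "Sigma_iw")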
Example #2
def SIAM(U, e_f, V, D, beta, filename="qmc_results.h5"):
    # Create hybridization function
    Delta = V**2 * Flat(D)

    # Construct the impurity solver with the inverse temperature
    # and the structure of the Green's functions
    S = Solver(beta=beta, gf_struct={'up': [0], 'down': [0]}, n_l=50)

    # Initialize the non-interacting Green's function S.G0_iw
    for name, g0 in S.G0_iw:
        g0 << inverse(iOmega_n - e_f - Delta)

    # Run the solver. The results will be in S.G_tau, S.G_iw and S.G_l
    S.solve(
        h_int=U * n('up', 0) * n('down', 0),  # Local Hamiltonian
        n_cycles=2000000,  # Number of QMC cycles
        length_cycle=50,  # Length of one cycle
        n_warmup_cycles=20000,  # Warmup cycles
        measure_g_l=True,  # Measure G_l (representation of G in terms of Legendre polynomials)
        use_norm_as_weight=True,  # Necessary option for the measurement of the density matrix
        measure_density_matrix=True,  # Measure reduced impurity density matrix
        measure_pert_order=True)  # Measure histogram of k

    # Save the results in an HDF5 file (only on the master node)
    if mpi.is_master_node():
        with HDFArchive(filename, 'w') as Results:
            Results["G_tau"] = S.G_tau
            Results["G_iw"] = S.G_iw
            Results["G_l"] = S.G_l
            Results["rho"] = S.density_matrix
            Results["k_histogram"] = S.perturbation_order_total
            Results["average_sign"] = S.average_sign
Example #3
def readold_sigma_iw_list(oldfile):

    if rank == 0:

        print('oldfile', oldfile)
        results = HDFArchive(oldfile, 'r')

        Sigma_iw_list = []

        n_iw_new = results["Sigma_iw___at_0/bl/mesh/size"]
        iw_mesh_new = MeshImFreq(beta, 'Fermion', n_iw_new // 2)
        ### n_iw for MeshImFreq is the number of positive frequencies,
        ### while the value read from the hdf5 file is the total number of frequencies.

        for i in range(N_atoms):

            dataname = "Sigma_iw___at_" + str(i)
            tmp = results[dataname]

            S = BlockGf(mesh=iw_mesh_new, gf_struct=gf_struct)
            S["bl"].data[...] = tmp["bl"].data[...]

            Sigma_iw_list.append(S)

    else:
        Sigma_iw_list = None

    Sigma_iw_list = world.bcast(Sigma_iw_list, root=0)

    return Sigma_iw_list
Example #4
def SIAM(U, e_f, V, D, beta, filename="qmc_results.h5"):
    Delta = V**2 * Flat(D)
    N_MC = 1e5
    l_max = 10
    independent_samples = 16
    for l in range(l_max + 1):
        for i in range(independent_samples):
            S = Solver(beta=beta, gf_struct={'up': [0], 'down': [0]})
            # Initialize the non-interacting Green's function S.G0_iw
            for name, g0 in S.G0_iw:
                g0 << inverse(iOmega_n - e_f - Delta)
            # Run the solver. The results will be in S.G_tau, S.G_iw and S.G_l
            S.solve(
                h_int=U * n('up', 0) * n('down', 0),  # Local Hamiltonian
                n_cycles=int(N_MC / 2**l),  # Number of QMC cycles
                length_cycle=2**l,  # Length of one cycle
                n_warmup_cycles=int(N_MC / 2**l / 100),  #  Warmup cycles
                measure_g_tau=False,  #  Don't measure G_tau
                measure_g_l=False,  #  Don't measure G_l
                perform_post_proc=False,  #  Don't measure G_iw
                use_norm_as_weight=True,  # Necessary option for the measurement of the density matrix
                measure_density_matrix=True,  # Measure reduced impurity density matrix
                random_seed=i * 8521 + l * 14187 + mpi.rank * 7472)  # Random seed, very important!
            # Save the results in an HDF5 file (only on the master node)
            if mpi.is_master_node():
                with HDFArchive(filename) as Results:
                    Results["rho_l{}_i{}".format(l, i)] = S.density_matrix
Example #5
def calc_field(plot=True):

    filenames = glob.glob('data_pyed_h_field*.h5')

    out = ParameterCollection()
    d = ParameterCollection(data=[])
    h_vec, m_vec, m_ref_vec = [], [], []

    for filename in filenames:

        print('--> Loading:', filename)

        with HDFArchive(filename, 'r') as s:
            p = s['p']
            d.data.append(p)
            h_vec.append(p.h_field)

            m = 0.5 * (-p.G_tau['up'](p.beta) + p.G_tau['dn'](p.beta))
            m_vec.append(np.squeeze(m))

            m_ref_vec.append(p.magnetization)

            # Susceptibility from quadratic expectation value
            if np.abs(p.h_field) < 1e-9:
                out.chi_exp = p.magnetization2 * 2 * p.beta

    h_vec, m_vec, m_ref_vec = np.array(h_vec), np.array(m_vec), np.array(m_ref_vec)
    sidx = np.argsort(h_vec)
    d.h_vec, d.m_vec, d.m_ref_vec = h_vec[sidx], m_vec[sidx], m_ref_vec[sidx]

    from scipy.interpolate import InterpolatedUnivariateSpline as IUS

    spl = IUS(d.h_vec, d.m_ref_vec)
    out.chi = -spl(0, nu=1)  # Linear response
    out.beta = p.beta

    print('beta, chi, chi_exp =', out.beta, out.chi, out.chi_exp)

    filename_out = 'data_pyed_extrap_h_field_beta%6.6f.h5' % out.beta
    with HDFArchive(filename_out, 'w') as s:
        s['field'] = out

    for key, value in out.dict().items():
        setattr(d, key, value)

    if plot: plot_field(d)
Example #6
def load_from_DMFT(filename, n_iter=0):
    with HDFArchive(filename, 'r') as QMC:
        G_tau = QMC["G_tau"]
        G_iw = QMC["G_iw"]
        G_l = QMC["G_l"]
        Sigma_iw = QMC["Sigma"]
        G_iw_list = [QMC["G_iw_iter{}".format(i)] for i in range(n_iter)]
    return G_tau, G_iw, G_l, Sigma_iw, G_iw_list
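
# Hedged usage sketch (an addition, not from the original): read back the
# archive written by the DMFT() routine of Example #13 below, which stores
# exactly these keys, including the per-iteration Green's functions.
G_tau, G_iw, G_l, Sigma_iw, G_iw_list = load_from_DMFT("dmft_results.h5", n_iter=8)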
Example #7
def SOM(input_filename="dmft_results.h5", output_filename="som_results.h5"):

    # Read G(\tau) from archive
    # Could be G(i\omega_n) or G_l as well.
    with HDFArchive(input_filename, 'r') as QMC:
        G_tau = QMC["G_tau"]
        G_iw = QMC["G_iw"]
        G_l = QMC["G_l"]

    # Paramagnetic case: average spin up and spin down GF
    g_tau = (G_tau['up'] + G_tau['down']) / 2.
    g_iw = (G_iw['up'] + G_iw['down']) / 2.
    g_l = (G_l['up'] + G_l['down']) / 2.

    # Prepare input data: reduce the number of \tau-slices from 10001 to n_tau
    # and reduce the number of Legendre coefficients to n_l
    g_tau_rebinned = rebinning_tau(g_tau, n_tau)
    g_l_cut = cut_coefficients(g_l, n_l)

    # Set the weight function S to a constant (all points of G_tau are equally important)
    S_tau = g_tau_rebinned.copy()
    S_tau.data[:] = 1.0

    S_l = g_l_cut.copy()
    S_l.data[:] = 1.0

    # Construct a SOM object
    #cont = Som(g_tau_rebinned, S_tau, kind = "FermionGf")
    cont = Som(g_l_cut, S_l, kind="FermionGf")

    # Run!
    cont.run(**run_params)

    # Create a real frequency GF obtained with SOM
    g_w = GfReFreq(window=energy_window, n_points=n_w, indices=[0])
    g_w << cont

    # G(\tau) reconstructed from the SOM solution
    g_rec_tau = g_tau_rebinned.copy()
    g_rec_tau << cont

    # On master node, save results to an archive
    if mpi.is_master_node():
        with HDFArchive(output_filename, 'w') as Results:
            Results['g_rec_tau'] = g_rec_tau
            Results['g_w'] = g_w
Example #8
def load_from_QMC(filename):
    with HDFArchive(filename, 'r') as QMC:
        G_tau = QMC["G_tau"]
        G_iw = QMC["G_iw"]
        G_l = QMC["G_l"]
        rho = QMC["rho"]
        k_histogram = QMC["k_histogram"]
        average_sign = QMC["average_sign"]
    return G_tau, G_iw, G_l, rho, k_histogram, average_sign
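
# Hedged usage sketch (an addition, not from the original): read back the
# archive written by the SIAM() routine of Example #2, whose default file
# name and keys match this reader.
G_tau, G_iw, G_l, rho, k_histogram, average_sign = load_from_QMC("qmc_results.h5")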
Example #9
def load_from_QMC(filename, l_max=10, n_samples=16):
    rho = []
    # Open the archive once and read all samples from it
    with HDFArchive(filename, 'r') as QMC:
        for l in range(l_max + 1):
            rho.append([])
            for i in range(n_samples):
                rho[-1].append(
                    make_diagonal_rho(QMC["rho_l{}_i{}".format(l, i)]))
    return rho
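
# Hedged usage sketch (an addition, not from the original): collect the
# density matrices written by the SIAM autocorrelation scan above
# (Example #4), which stores them under the matching keys "rho_l{l}_i{i}".
rho = load_from_QMC("qmc_results.h5", l_max=10, n_samples=16)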
Example #10
def make_calc(beta=2.0, h_field=0.0):
    
    # ------------------------------------------------------------------
    # -- Hubbard atom in an applied field, Hamiltonian

    p = ParameterCollection(
        beta = beta,
        h_field = h_field,
        U = 5.0,
        ntau = 40,
        niw = 15,
        )

    p.mu = 0.5*p.U
    
    # ------------------------------------------------------------------

    print('--> Solving SIAM with parameters')
    print(p)
    
    # ------------------------------------------------------------------

    up, do = 'up', 'dn'
    docc = c_dag(up,0) * c(up,0) * c_dag(do,0) * c(do,0)
    mA = c_dag(up,0) * c(up,0) - c_dag(do,0) * c(do,0)
    nA = c_dag(up,0) * c(up,0) + c_dag(do,0) * c(do,0)

    p.H = -p.mu * nA + p.U * docc + p.h_field * mA
    
    # ------------------------------------------------------------------

    fundamental_operators = [c(up,0), c(do,0)]
    
    ed = TriqsExactDiagonalization(p.H, fundamental_operators, p.beta)

    g_tau = GfImTime(beta=beta, statistic='Fermion', n_points=40, indices=[0])
    g_iw = GfImFreq(beta=beta, statistic='Fermion', n_points=10, indices=[0])

    p.G_tau = BlockGf(name_list=[up,do], block_list=[g_tau]*2, make_copies=True)
    p.G_iw = BlockGf(name_list=[up,do], block_list=[g_iw]*2, make_copies=True)
    
    ed.set_g2_tau(p.G_tau[up], c(up,0), c_dag(up,0))
    ed.set_g2_tau(p.G_tau[do], c(do,0), c_dag(do,0))

    ed.set_g2_iwn(p.G_iw[up], c(up,0), c_dag(up,0))
    ed.set_g2_iwn(p.G_iw[do], c(do,0), c_dag(do,0))

    p.magnetization = ed.get_expectation_value(0.5 * mA)
    p.magnetization2 = ed.get_expectation_value(0.25 * mA * mA)
    
    # ------------------------------------------------------------------
    # -- Store to hdf5
    
    filename = 'data_pyed_h_field_%4.4f.h5' % h_field
    with HDFArchive(filename,'w') as res:
        res['p'] = p
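
# Hedged usage sketch (an addition, not from the original): sweep the applied
# field to generate the archives 'data_pyed_h_field_*.h5' that calc_field()
# in Examples #5 and #12 globs for; the field values are illustrative.
for h_field in [0.0, 0.001, 0.002, 0.004]:
    make_calc(beta=2.0, h_field=h_field)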
Example #11
def calc_dynamic(plot=True):

    filenames = glob.glob('data_cthyb*.h5')
    if len(filenames) != 1: return
    filename = filenames[0]

    print('--> Loading:', filename)
    with HDFArchive(filename, 'r') as s:
        p = s['p']

    p.chi_m = p.G2_iw_ph[('up', 'up')] - p.G2_iw_ph[('up', 'do')]
    p.chi = np.sum(p.chi_m.data) / p.beta**2

    with HDFArchive(filename, 'w') as s:
        s['p'] = p

    print('beta, chi =', p.beta, p.chi)

    if plot: plot_dynamic(p)
Example #12
def calc_field(plot=True):

    filenames = glob.glob('data_pyed_h_field*.h5')

    out = ParameterCollection(data=[])
    h_vec, m_vec, m_ref_vec = [], [], []

    for filename in filenames:

        print('--> Loading:', filename)

        with HDFArchive(filename, 'r') as s:
            p = s['p']
            out.data.append(p)
            h_vec.append(p.h_field)

            m = 0.5 * (-p.G_tau['up'](p.beta) + p.G_tau['dn'](p.beta))
            m_vec.append(np.squeeze(m))

            m_ref_vec.append(p.magnetization)

    h_vec, m_vec, m_ref_vec = np.array(h_vec), np.array(m_vec), np.array(m_ref_vec)
    sidx = np.argsort(h_vec)
    out.h_vec, out.m_vec, out.m_ref_vec = h_vec[sidx], m_vec[sidx], m_ref_vec[sidx]

    from scipy.interpolate import InterpolatedUnivariateSpline as IUS

    spl = IUS(out.h_vec, out.m_ref_vec)
    out.chi = -spl(0, nu=1)  # Linear response
    out.beta = p.beta

    print('beta, chi =', out.beta, out.chi)

    filename_out = 'data_pyed_extrap_h_field_beta%6.6f.h5' % out.beta
    with HDFArchive(filename_out, 'w') as s:
        s['field'] = out

    if plot: plot_field(out)
Example #13
def DMFT(U, e_d, t, beta, filename="dmft_results.h5"):
    # Construct the CT-HYB-QMC solver
    S = Solver(beta=beta, gf_struct={'up': [0], 'down': [0]}, n_l=50)

    # Initialize Delta
    Delta = GfImFreq(beta=beta, indices=[0])
    Delta << t**2 * SemiCircular(half_bandwidth=2 * t)

    # Now do the DMFT loop
    n_iter = 8
    for it in range(n_iter):

        # Compute new S.G0_iw
        for name, g0 in S.G0_iw:
            g0 << inverse(iOmega_n - e_d - Delta)
        # Run the solver
        S.solve(
            h_int=U * n('up', 0) * n('down', 0),  # Local Hamiltonian
            n_cycles=200000,  # Number of QMC cycles
            length_cycle=50,  # Length of a cycle
            n_warmup_cycles=2000,  # How many warmup cycles
            measure_g_l=True)
        # Compute new Delta with the self-consistency condition while imposing paramagnetism
        g_l = (S.G_l['up'] + S.G_l['down']) / 2.
        Delta.set_from_legendre(t**2 * g_l)

        # Intermediate saves
        if mpi.is_master_node():
            with HDFArchive(filename) as Results:
                Results["G_tau_iter{}".format(iter)] = S.G_tau
                Results["G_iw_iter{}".format(iter)] = S.G_iw
                Results["G_l_iter{}".format(iter)] = S.G_l
                Results["Sigma_iter{}".format(iter)] = S.Sigma_iw

    if mpi.is_master_node():
        with HDFArchive(filename) as Results:
            Results["G_tau"] = S.G_tau
            Results["G_iw"] = S.G_iw
            Results["G_l"] = S.G_l
            Results["Sigma"] = S.Sigma_iw
Example #14
    def change_dataset(self, *args):
        self.locked1 = True
        self.quantities = []
        if self.pickle_mode:
            with open(self.h5_file, 'rb') as fi:
                self._result = _get_path(self.dataset.get(), pickle.load(fi))
        else:
            with HDFArchive(self.h5_file, 'r') as arx:
                self._result = _get_path(self.dataset.get(), arx)

        for function in dir(self._result):
            if not function.startswith("plot_"):
                continue
            # if there is an error with getting the data
            # we do not want to offer it in the menu
            try:
                getattr(self._result, function).original(self._result)
                self.quantities.append(function[5:])
            except Exception as e:
                print(e)

        if not hasattr(self._result, 'analyzer_results'):
            ar = []
        elif self._result.matrix_structure is not None and self._result.element_wise:
            m = product(
                *map(range, self._result.effective_matrix_structure))
            ar = [(i, self._get_ar_i(i)) for i in m]
        else:
            ar = [(None, self._result.analyzer_results)]
        for ia, a in ar:
            for key, analyzer in a.items():
                for function in dir(analyzer):
                    if not function.startswith("plot_"):
                        continue
                    # if there is an error with getting the data
                    # we do not want to offer it in the menu
                    try:
                        getattr(analyzer, function).\
                            original(analyzer, self._result, element=ia)
                        ky = key + ': ' + function[5:]
                        if ky not in self.quantities:
                            self.quantities.append(ky)
                    except Exception:
                        pass

        self.update_quantity_ui()
        self.locked1 = False
        self.update_plot()
Example #15
def write_TarGZ_HDFArchive(filename, **kwargs):

    import os
    import tarfile
    from pytriqs.archive import HDFArchive

    filename = filename.split('.')[0]
    filename_h5 = filename + '.h5'
    filename_tar = filename + '.tar.gz'

    with HDFArchive(filename_h5, 'w') as res:
        for key, value in kwargs.items():
            res[key] = value

    with tarfile.open(filename_tar, 'w:gz') as tar:
        tar.add(filename_h5)

    os.remove(filename_h5)
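
# Hedged usage sketch (an addition, not from the original): write an object
# under the key 'p' and read it back with read_TarGZ_HDFArchive() from
# Example #16, which expects that key; 'p' stands for any storable object,
# e.g. a ParameterCollection.
write_TarGZ_HDFArchive('data_example.tar.gz', p=p)
p = read_TarGZ_HDFArchive('data_example.tar.gz')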
Example #16
def read_TarGZ_HDFArchive(filename):

    import os
    import tarfile
    from tempfile import NamedTemporaryFile

    with tarfile.open(filename, "r:gz") as tar:
        f = tar.extractfile(tar.getmembers()[0])
        tmp = NamedTemporaryFile(delete=False)
        tmp.write(f.read())
        tmp.close()

    with HDFArchive(tmp.name, 'r') as res:
        p = res['p']

    os.remove(tmp.name)

    return p
Example #17
def read_TarGZ_HDFArchive(filename):

    import os
    import tarfile
    from tempfile import NamedTemporaryFile
    from pytriqs.archive import HDFArchive

    with tarfile.open(filename, "r:gz") as tar:
        f = tar.extractfile(tar.getmembers()[0])
        tmp = NamedTemporaryFile(delete=False)
        tmp.write(f.read())
        tmp.close()

    data = HDFArchive(tmp.name, 'r')

    os.remove(tmp.name)

    return data
Example #18
    def test_ortho(self):
        self.proj_gr.orthogonalize()

        dens_mat, overl = self.proj_sh.density_matrix(self.el_struct)

        # testout = _rpath + 'projortho.out.test'
        # with open(testout, 'wt') as f:
        #     f.write("density matrix: %s\n" % (dens_mat))
        #     f.write("overlap matrix: %s\n" % (overl))
        testout = _rpath + 'projortho.test.h5'
        with HDFArchive(testout, 'w') as h5test:
            h5test['density_matrix'] = dens_mat
            h5test['overlap_matrix'] = overl

        # FIXME: seems redundant, as 'overl' is written to the file anyway
        self.assertEqual(overl, np.eye(5))

        # expected_file = _rpath + 'projortho.out'
        expected_file = _rpath + 'projortho.out.h5'
        # self.assertFileEqual(testout, expected_file)
        self.assertH5FileEqual(testout, expected_file)
Example #19
def initialize_outputfile(iter_):

    if world.Get_rank() == 0:
        if iter_ < 1000:
            filename = data_folder + "/iteration_%03i.h5" % iter_
        else:
            print('too many iterations...')
            exit()

        print('filename', filename)
        results = HDFArchive(filename, 'w')

        import inspect
        import __main__
        source = inspect.getsource(__main__)
        results["source_file"] = source

        return results
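
# Hedged usage sketch (an addition, not from the original): on the master
# rank this returns an open archive for the given iteration (with the calling
# script's source stored under "source_file"); all other ranks return None.
results = initialize_outputfile(0)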
Example #20
def make_calc(beta=2.0, nwf=8):
    
    # ------------------------------------------------------------------
    # -- Hubbard atom, analytic solution

    p = ParameterCollection(
        beta = beta,
        U = 5.0,
        nw = 1,
        nwf = nwf,
        nwf_gf = 2*nwf,
        )

    ana = analytic_hubbard_atom(**p.dict())

    p.chi = np.sum(ana.chi_m.data) / p.beta**2
    
    # ------------------------------------------------------------------
    # -- Store to hdf5
    
    filename = 'data_dynamic_beta%6.6f_nwf%i.h5' % (p.beta, p.nwf)
    with HDFArchive(filename,'w') as res:
        res['p'] = p
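
# Hedged usage sketch (an addition, not from the original): scan the
# fermionic frequency cutoff nwf with illustrative values to study the
# convergence of the stored susceptibility.
for nwf in [8, 16, 32]:
    make_calc(beta=2.0, nwf=nwf)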
Example #21
    def test_ortho_normion(self):
        self.proj_gr.normion = True
        self.proj_gr.orthogonalize()

        dens_mat, overl = self.proj_sh.density_matrix(self.el_struct)

        # testout = _rpath + 'projortho_normion.out.test'
        # with open(testout, 'wt') as f:
        #     f.write("density matrix: %s\n" % (dens_mat))
        #     f.write("overlap matrix: %s\n" % (overl))
        testout = _rpath + 'projortho_normion.test.h5'
        with HDFArchive(testout, 'w') as h5test:
            h5test['density_matrix'] = dens_mat
            h5test['overlap_matrix'] = overl

        # FIXME: redundant
        self.assertEqual(overl[0, 0, ...], np.eye(5))
        self.assertEqual(overl[0, 1, ...], np.eye(5))

        # expected_file = _rpath + 'projortho_normion.out'
        # self.assertFileEqual(testout, expected_file)
        expected_file = _rpath + 'projortho_normion.out.h5'
        self.assertH5FileEqual(testout, expected_file)
Example #22
    def get_datasets(self, ar=None, path=''):
        ret = []
        is_dir = False
        if ar is None:
            if self.pickle_mode:
                with open(self.h5_file, 'rb') as fi:
                    return self.get_datasets(pickle.load(fi), path)
            else:
                with HDFArchive(self.h5_file, 'r') as ar:
                    return self.get_datasets(ar, path)
        # we detect whether it is a dataset directory
        if isinstance(ar, MaxEntResultData):
            is_dir = True
            if len(path) == 0:
                path = '/'
            ret.append(path)
            return ret
        for key in ar:
            try:
                ret += self.get_datasets(ar[key], path + '/' + key)
            except Exception:
                pass
        return ret
Example #23
# Now split using the total number of particles, N = N_up + N_dn
ad = AtomDiag(H, fops, [N_up + N_dn])
print(ad.n_subspaces)  # 7

# Split the Hilbert space automatically
ad = AtomDiag(H, fops)
print(ad.n_subspaces)  # 28

# Partition function for inverse temperature \beta=3
beta = 3
print(partition_function(ad, beta))

# Equilibrium density matrix
dm = atomic_density_matrix(ad, beta)

# Expectation values of orbital double occupancies
print(trace_rho_op(dm, n('up', 0) * n('dn', 0), ad))
print(trace_rho_op(dm, n('up', 1) * n('dn', 1), ad))
print(trace_rho_op(dm, n('up', 2) * n('dn', 2), ad))

# Atomic Green's functions
gf_struct = [['dn', orb_names], ['up', orb_names]]
G_w = atomic_g_w(ad, beta, gf_struct, (-2, 2), 400, 0.01)
G_tau = atomic_g_tau(ad, beta, gf_struct, 400)
G_iw = atomic_g_iw(ad, beta, gf_struct, 100)
G_l = atomic_g_l(ad, beta, gf_struct, 20)

# Finally, we save our AtomDiag object for later use
with HDFArchive('atom_diag_example.h5') as ar:
    ar['ad'] = ad
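
# Hedged usage sketch (an addition, not from the original): the saved
# AtomDiag object can later be recovered from the archive.
with HDFArchive('atom_diag_example.h5', 'r') as ar:
    ad = ar['ad']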
Example #24
# This plotting script is largely based on a work of Malte Harland
# [email protected]

from pytriqs.gf.local import *
from pytriqs.gf.local.descriptors import *
from pytriqs.archive import HDFArchive
from pytriqs.statistics.histograms import Histogram
from matplotlib import pyplot as plt
from matplotlib.backends.backend_pdf import PdfPages
import numpy as np
from scipy.integrate import quad

from dos import A, e_min, e_max, e_th, e_con

arch = HDFArchive('microgap.h5', 'r')
pp = PdfPages('microgap.pdf')

abs_errors = arch['abs_errors']


def make_ref(g):
    print "Norm of the reference DOS:", quad(A, e_min, e_max,
                                             limit=100)[0].real
    g << Function(lambda w: -1j * np.pi * A(w.real))


def plot_A_w(g_w, g_w_ref, fig):
    w_mesh = [w for w in g_w.mesh]

    ax = fig.add_axes([.1, .54, .55, .4])
    ax.plot(w_mesh, -g_w.data[:, 0, 0].imag / np.pi,
            color='red', linewidth=0.6, label='SOM')
Example #25
    # ------------------------------------------------------------------
    # -- Collect results

    d.Sigma_iw = solv.Sigma_iw['up']
    d.G_tau = solv.G_tau['up']
    d.G_l = solv.G_l['up']
    d.G0_iw = solv.G0_iw['up']

    d.G_iw = G_iw['up']
    d.Gl_tau = G_tau['up']

    d.runtime = runtime
    d.G2_tau = solv.g4_tau[('up', 'do')]
    d.G2_iw = solv.g4_iw[('up', 'do')]
    d.G2_iw_pp = solv.g4_iw_pp[('up', 'do')]
    d.G2_iw_ph = solv.g4_iw_ph[('up', 'do')]

    d.mpi_size = mpi.size

    d.perturbation_order = solv.perturbation_order
    d.perturbation_order_total = solv.perturbation_order_total

    # ------------------------------------------------------------------
    # -- Store results
    if mpi.is_master_node():
        filename = 'data_cthyb.h5'
        with HDFArchive(filename, 'w') as res:
            for key, value in d.__dict__.items():
                res[key] = value
Example #26
import numpy as np
import matplotlib.pyplot as plt

# ----------------------------------------------------------------------

from pytriqs.archive import HDFArchive
from pytriqs.gf import MeshBrillouinZone

# ----------------------------------------------------------------------
if __name__ == '__main__':

    filename = 'data_e_k_and_chi00_wk.h5'

    with HDFArchive(filename, 'r') as arch:
        e_k = arch['e_k']

    k = np.linspace(-0.5, 0.5, num=200) * 2. * np.pi
    Kx, Ky = np.meshgrid(k, k)

    e_k_interp = np.vectorize(lambda kx, ky: e_k([kx, ky, 0])[0, 0].real)
    e_k_interp = e_k_interp(Kx, Ky)

    plt.imshow(
        e_k_interp,
        cmap=plt.get_cmap('RdBu'),
        extent=(k.min(), k.max(), k.min(), k.max()),
        origin='lower',
    )
    plt.colorbar()

    plt.contour(Kx, Ky, e_k_interp, levels=[0])
Example #27
# This plotting script is largely based on a work of Malte Harland
# [email protected]

from pytriqs.gf.local import *
from pytriqs.gf.local.descriptors import *
from pytriqs.archive import HDFArchive
from pytriqs.statistics.histograms import Histogram
from matplotlib import pyplot as plt
from matplotlib.backends.backend_pdf import PdfPages
import numpy as np
from scipy.integrate import quad

from dos import A, e_min, e_max, e_low

arch = HDFArchive('triangles.h5', 'r')
pp = PdfPages('triangles.pdf')

abs_errors = arch['abs_errors']

def make_ref(g):
    print "Norm of the reference DOS:", quad(A, e_min, e_max, limit=100)[0].real
    g << Function(lambda w: -1j*np.pi * A(w.real))

def plot_A_w(g_w, g_w_ref, fig):
    w_mesh = [w for w in g_w.mesh]

    ax = fig.add_axes([.1,.54,.55,.4])
    ax.plot(w_mesh, -g_w.data[:,0,0].imag/np.pi,
            color = 'red', linewidth = 0.6, label = 'SOM')
    ax.plot(w_mesh, -g_w_ref.data[:,0,0].imag/np.pi,
            color = 'blue', linewidth = 0.6, linestyle='dashed', label = 'reference')
Example #28
# Solver parameters
p = {}
p["max_time"] = -1
p["length_cycle"] = 50
p["n_warmup_cycles"] = 50
p["n_cycles"] = 5000

Converter = Wien2kConverter(filename=dft_filename, repacking=True)
Converter.convert_dft_input()
mpi.barrier()

previous_runs = 0
previous_present = False
if mpi.is_master_node():
    f = HDFArchive(dft_filename + '.h5', 'a')
    if 'dmft_output' in f:
        ar = f['dmft_output']
        if 'iterations' in ar:
            previous_present = True
            previous_runs = ar['iterations']
    else:
        f.create_group('dmft_output')
    del f
previous_runs    = mpi.bcast(previous_runs)
previous_present = mpi.bcast(previous_present)

SK = SumkDFT(hdf_file=dft_filename + '.h5', use_dft_blocks=use_blocks, h_field=h_field)

n_orb = SK.corr_shells[0]['dim']
l = SK.corr_shells[0]['l']
Example #29
def make_calc():

    # ------------------------------------------------------------------
    # -- Hamiltonian

    p = ParameterCollection(
        beta = 0.5,
        U = 0.5,
        nw = 1,
        nwf = 15,
        V = 1.0,
        eps = 0.2,
        )

    p.nwf_gf = 4 * p.nwf
    p.mu = 0.5*p.U

    # ------------------------------------------------------------------

    ca_up, cc_up = c('0', 0), c_dag('0', 0)
    ca_do, cc_do = c('0', 1), c_dag('0', 1)

    ca0_up, cc0_up = c('1', 0), c_dag('1', 0)
    ca0_do, cc0_do = c('1', 1), c_dag('1', 1)

    docc = cc_up * ca_up * cc_do * ca_do
    nA = cc_up * ca_up + cc_do * ca_do
    hybridiz = p.V * (cc0_up * ca_up + cc_up * ca0_up + cc0_do * ca_do + cc_do * ca0_do)
    bath_lvl = p.eps * (cc0_up * ca0_up + cc0_do * ca0_do)

    p.H_int = p.U * docc
    p.H = -p.mu * nA + p.H_int + hybridiz + bath_lvl

    # ------------------------------------------------------------------
    # -- Exact diagonalization

    # Conversion from TRIQS to Pomerol notation for operator indices
    # TRIQS:   block_name, inner_index
    # Pomerol: site_label, orbital_index, spin_name
    index_converter = {
        ('0', 0) : ('loc', 0, 'up'),
        ('0', 1) : ('loc', 0, 'down'),
        ('1', 0) : ('loc', 1, 'up'),
        ('1', 1) : ('loc', 1, 'down'),
        }

    # -- Create Exact Diagonalization instance
    ed = PomerolED(index_converter, verbose=True)
    ed.diagonalize(p.H) # -- Diagonalize H

    p.gf_struct = [['0', [0, 1]]]

    # -- Single-particle Green's functions
    p.G_iw = ed.G_iw(p.gf_struct, p.beta, n_iw=p.nwf_gf)['0']

    # -- Particle-particle two-particle Matsubara frequency Green's function
    opt = dict(
        beta=p.beta, gf_struct=p.gf_struct,
        blocks=set([("0", "0")]),
        n_iw=p.nw, n_inu=p.nwf)

    p.G2_iw_ph = ed.G2_iw_inu_inup(channel='PH', **opt)[('0', '0')]

    filename = 'data_pomerol.h5'
    with HDFArchive(filename,'w') as res:
        res['p'] = p

    import os
    os.system('tar czvf data_pomerol.tar.gz data_pomerol.h5')
    os.remove('data_pomerol.h5')
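
# Hedged usage sketch (an addition, not from the original): the gzipped
# archive produced above can be read back with read_TarGZ_HDFArchive() from
# Example #16, which returns the object stored under the key 'p'.
p = read_TarGZ_HDFArchive('data_pomerol.tar.gz')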
Example #30
# Function to extract the density for a given mu, to be used by the
# dichotomy function to determine mu
def Dens(mu):
    dens = SK(mu=mu, Sigma=Sigma_lat).total_density()
    if abs(dens.imag) > 1e-20:
        mpi.report(
            "Warning: Imaginary part of density will be ignored ({})".format(
                str(abs(dens.imag))))
    return dens.real


# Check if there are previous runs in the output file and, if so, restart from there
previous_runs = 0
previous_present = False
mu = 0.
if mpi.is_master_node():
    ar = HDFArchive(outfile + '.h5', 'a')
    if 'iterations' in ar:
        previous_present = True
        previous_runs = ar['iterations']
        S.Sigma_iw = ar['Sigma_iw']
        mu = ar['mu-%d' % previous_runs]
    del ar
previous_runs = mpi.bcast(previous_runs)
previous_present = mpi.bcast(previous_present)
S.Sigma_iw = mpi.bcast(S.Sigma_iw)
mu = mpi.bcast(mu)

for iteration_number in range(1, nloops + 1):
    it = iteration_number + previous_runs
    if mpi.is_master_node():
        print('-----------------------------------------------')