def read_eshdf_eig_data(filename, Ef_list):
    """Read per-kpoint, per-spin occupied eigenvalues from an ESHDF file.

    Parameters
    ----------
    filename : str
        Path to the ESHDF orbital file.
    Ef_list : list
        Fermi energy (eV) for each spin channel; its length must equal the
        number of spin channels stored in the file.

    Returns
    -------
    obj
        Container holding the cell axes, k-space axes, kpoint/spin counts
        and, for each (kpoint, spin) pair, the reduced kpoint and the
        eigenvalues (eV) below the Fermi level.
    """
    from numpy import array, pi
    from numpy.linalg import inv
    from unit_converter import convert
    from hdfreader import read_hdf
    from developer import error

    def h5int(i):
        # HDF scalars arrive as length-1 arrays; unwrap to a plain int
        return array(i, dtype=int)[0]
    #end def h5int

    # View mode avoids loading the (potentially large) arrays into memory
    h = read_hdf(filename, view=True)
    axes = array(h.supercell.primitive_vectors)
    # Reciprocal (k-space) cell axes
    kaxes = 2 * pi * inv(axes).T
    nk = h5int(h.electrons.number_of_kpoints)
    ns = h5int(h.electrons.number_of_spins)
    if len(Ef_list) != ns:
        msg = 'Ef "%s" must have same length as nspin=%d' % (str(Ef_list), ns)
        error(msg)
    #end if
    data = obj()
    for k in range(nk):
        kp = h.electrons['kpoint_' + str(k)]
        for s, Ef in zip(range(ns), Ef_list):
            # Small shift so states exactly at Ef count as occupied
            E_fermi = Ef + 1e-8
            path = 'electrons/kpoint_{0}/spin_{1}'.format(k, s)
            spin = h.get_path(path)
            eig = convert(array(spin.eigenvalues), 'Ha', 'eV')
            nst = h5int(spin.number_of_states)
            # Keep only the states below the (shifted) Fermi level
            eig_s = [eig[st] for st in range(nst) if eig[st] < E_fermi]
            data[k, s] = obj(
                kpoint=array(kp.reduced_k),
                eig=array(eig_s),
            )
        #end for
    #end for
    res = obj(
        orbfile=filename,
        axes=axes,
        kaxes=kaxes,
        nkpoints=nk,
        nspins=ns,
        data=data,
    )
    return res
#end def read_eshdf_eig_data
# ---- Example #2 ----
def read_eshdf_nofk_data(filename, Ef):
    """Read momentum-distribution (n(k)) data for occupied states from an ESHDF file.

    Parameters
    ----------
    filename : str
        Path to the ESHDF orbital file.
    Ef : float
        Fermi energy (eV); states with eigenvalue below Ef (plus a tiny
        shift) are treated as occupied.

    Returns
    -------
    obj
        Container with cell and k-space axes, kpoint/spin counts and, for
        each (kpoint, spin) pair: the reduced kpoint, per-orbital kinetic
        energies (Ha), eigenvalues (eV), Cartesian k-vectors, n(k) on the
        G-vector grid, and the electron count.
    """
    from numpy import array, pi, dot, zeros
    from numpy.linalg import inv
    # 'convert' was used below but never imported; import it locally the
    # same way read_eshdf_eig_data does
    from unit_converter import convert
    from hdfreader import read_hdf

    def h5int(i):
        # HDF scalars arrive as length-1 arrays; unwrap to a plain int
        return array(i, dtype=int)[0]
    #end def h5int

    # Use slightly shifted Fermi energy so states exactly at Ef count as occupied
    E_fermi = Ef + 1e-8

    # Open the HDF file w/o loading the arrays into memory (view mode)
    vlog('Reading ' + filename)
    h = read_hdf(filename, view=True)

    # Get the G-vectors in cell coordinates
    gvu = array(h.electrons.kpoint_0.gvectors)

    # Get the untiled cell axes
    axes = array(h.supercell.primitive_vectors)

    # Compute the k-space cell axes
    kaxes = 2 * pi * inv(axes).T

    # Convert G-vectors from cell coordinates to atomic units
    gv = dot(gvu, kaxes)

    # Get number of kpoints/twists, spins, and G-vectors
    nkpoints = h5int(h.electrons.number_of_kpoints)
    nspins = h5int(h.electrons.number_of_spins)
    ngvecs = len(gv)

    # Process the orbital data
    data = obj()
    for k in range(nkpoints):
        vlog('Processing k-point {:>3}'.format(k), n=1, time=True)
        kp = h.electrons['kpoint_' + str(k)]
        # Shift the G-vectors by the Cartesian twist vector of this kpoint
        gvs = dot(array(kp.reduced_k), kaxes)
        gvk = gv.copy()
        for d in range(3):
            gvk[:, d] += gvs[d]
        #end for
        kinetic = (gvk**2).sum(1) / 2  # Hartree units
        for s in range(nspins):
            kin_s = []
            eig_s = []
            k_s = gvk
            nk_s = zeros((ngvecs, ), dtype=float)
            nelec_s = 0
            path = 'electrons/kpoint_{0}/spin_{1}'.format(k, s)
            spin = h.get_path(path)
            eigs = convert(array(spin.eigenvalues), 'Ha', 'eV')
            nstates = h5int(spin.number_of_states)
            for st in range(nstates):
                eig = eigs[st]
                if eig < E_fermi:
                    # Occupied orbital: accumulate |psi(G)|^2 and its
                    # kinetic-energy contribution
                    stpath = path + '/state_{0}/psi_g'.format(st)
                    psi = array(h.get_path(stpath))
                    nk_orb = (psi**2).sum(1)
                    kin_orb = (kinetic * nk_orb).sum()
                    nelec_s += nk_orb.sum()
                    nk_s += nk_orb
                    kin_s.append(kin_orb)
                    eig_s.append(eig)
                #end if
            #end for
            data[k, s] = obj(
                kpoint=array(kp.reduced_k),
                kin=array(kin_s),
                eig=array(eig_s),
                k=k_s,
                nk=nk_s,
                ne=nelec_s,
            )
        #end for
    #end for
    res = obj(
        orbfile=filename,
        E_fermi=E_fermi,
        axes=axes,
        kaxes=kaxes,
        nkpoints=nkpoints,
        nspins=nspins,
        data=data,
    )

    return res
#end def read_eshdf_nofk_data
# ---- Example #3 ----
# Plot an O3 mixing-ratio profile from an NDACC retrieval read via hdfreader.
import pickle

import numpy as np
import pandas as pd
import seaborn as sns
import matplotlib.pyplot as plt
import helpers
import hdfreader

from scipy.interpolate import interp1d

# Grid to interpolate to: load the stored WACAM height levels and extend
# the bottom layer down to 0 km (lowest stored level is 0.61 km).
# NOTE(review): the *1000 presumably converts km to m — confirm units.
with open('./pickle/heightpickle.obj', 'rb') as wacam_heights_a:
    wacam_heights = pickle.load(wacam_heights_a)
wacam_heights = np.append(wacam_heights[:-1], np.array([0])) * 1000

# ================= O3 ====================
df_o3 = hdfreader.read_hdf('../ndacc', 'O3')
o3_retr_time = hdfreader.find_closest_retrival(df_o3, "200309T7:15:00")
print("Closest O3 retrival", o3_retr_time)

o3_selected_retrival = df_o3.loc[o3_retr_time][
    'O3.MIXING.RATIO.VOLUME_ABSORPTION.SOLAR']
o3_altitude = df_o3.loc[o3_retr_time]['ALTITUDE']
o3_pressure = df_o3.loc[o3_retr_time]['PRESSURE_INDEPENDENT']
o3_temperature = df_o3.loc[o3_retr_time]['TEMPERATURE_INDEPENDENT']

#interp_o3_f = interp1d(o3_altitude,o3_selected_retrival,fill_value="extrapolate", kind="linear")
#interp_o3 = interp_o3_f(wacam_heights)

plt.plot(o3_selected_retrival, o3_altitude)
plt.savefig('o3_profile')
# ---- Example #4 ----
    def test_read():
        """Write a small HDF5 file with nested groups/datasets/attributes
        and verify that read_hdf reproduces its contents and structure."""
        import os
        import numpy as np
        import h5py
        from hdfreader import read_hdf

        # Reference data: scalar attributes and array datasets of each flavor
        ds = 'string value'
        di = 100
        df = np.pi
        das = np.array(tuple('abcdefghijklmnopqrstuvwxyz'), dtype=bytes)
        dai = np.arange(20, dtype=np.int64)
        daf = 0.1 * np.arange(20, dtype=np.float64)

        path = testing.setup_unit_test_output_directory(
            'hdfreader', 'test_read')

        def add_datasets(g):
            # Attach one dataset of each array flavor to group g
            g.create_dataset('sdata', data=das)
            g.create_dataset('idata', data=dai)
            g.create_dataset('fdata', data=daf)
        #end def add_datasets

        def add_attrs(g):
            # Attach one attribute of each scalar flavor to group g
            g.attrs['sval'] = ds
            g.attrs['ival'] = di
            g.attrs['fval'] = df
        #end def add_attrs

        def add_group(g, label=''):
            # Create a child group populated with attrs and datasets
            g = g.create_group('group' + str(label))
            add_attrs(g)
            add_datasets(g)
            return g
        #end def add_group

        testfile = os.path.join(path, 'test.h5')

        # Build: datasets at the root, two groups, two subgroups in each.
        # Context manager guarantees the file is closed even on error.
        with h5py.File(testfile, 'w') as f:
            add_datasets(f)
            g1 = add_group(f, 1)
            g2 = add_group(f, 2)
            add_group(g1, 1)
            add_group(g1, 2)
            add_group(g2, 1)
            add_group(g2, 2)
        #end with

        def check_datasets(g):
            assert (value_eq(g.sdata, das))
            assert (value_eq(g.idata, dai))
            assert (value_eq(g.fdata, daf))
        #end def check_datasets

        def check_groups(g):
            assert ('group1' in g)
            assert ('group2' in g)
            check_datasets(g.group1)
            check_datasets(g.group2)
        #end def check_groups

        h = read_hdf(testfile)

        # Root carries the datasets; each group level nests identically
        check_datasets(h)
        check_groups(h)
        check_groups(h.group1)
        check_groups(h.group2)
    #end def test_read