Example #1
def test_velocity_traj():
    # Test Trajectory.get_velocity() against velocities output from CP2K. The
    # agreement is very good. Works only for fixed-cell MDs, however!
    dr = 'files/cp2k/md/nvt_print_low'
    base = os.path.dirname(dr) 
    fn = '%s/cp2k.out' %dr
    print(common.backtick('tar -C {0} -xzf {1}.tgz'.format(base,dr)))
    tr = io.read_cp2k_md(fn)
    # read from data file
    v1 = tr.velocity.copy()
    # If tr.velocity != None, then get_velocity() doesn't calculate it. Then,
    # it simply returns tr.velocity, which is what we of course usually want.
    tr.velocity = None
    # calculate from coords + time step, b/c of central diffs, only steps 1:-1
    # are the same
    v2 = tr.get_velocity()
    print ">>>> np.abs(v1).max()", np.abs(v1).max()
    print ">>>> np.abs(v1).min()", np.abs(v1).min()
    print ">>>> np.abs(v2).max()", np.abs(v2).max()
    print ">>>> np.abs(v2).min()", np.abs(v2).min()
    print ">>>> np.abs(v1-v2).max()", np.abs(v1-v2).max()
    print ">>>> np.abs(v1-v2).min()", np.abs(v1-v2).min()
    assert np.allclose(v1[1:-1,...], v2[1:-1,...], atol=1e-4)
    
    ##from pwtools import mpl
    ##fig,ax = mpl.fig_ax()
    ##ax.plot(v1[1:-1,:,0], 'b')
    ##ax.plot(v2[1:-1,:,0], 'r')
    ##mpl.plt.show()
    
    shape = (100,10,3)
    arr = np.random.rand(*shape)
    assert crys.velocity_traj(arr, axis=0).shape == shape
    assert crys.velocity_traj(arr, axis=0, endpoints=False).shape == (98,10,3)
Example #2
 def omp_num_threads(action='check', num=1, omp_dct=OMP_DCT, err=False):
     key = 'OMP_NUM_THREADS'
     has_key = key in os.environ
     if action == 'check':
         if has_key:
             print "[omp_num_threads] os.environ['%s']: %s" %(key, os.environ[key])
             print "[omp_num_threads] shell$ echo %s" %(key)
             print backtick('echo $%s' %key)
             if err and os.environ[key] != '3':
                 return 'err'
         else:
             print "[omp_num_threads] no os.environ['%s']" %key
             if err:
                 return 'err'
     elif action == 'backup':
         if has_key:
             print "[omp_num_threads] backup os.environ['%s'] = '%s'" %(key, os.environ[key])
             omp_dct['num_threads'] = os.environ[key]
         else:            
             omp_dct['num_threads'] = None
     elif action == 'restore':
         if has_key:
             print "[omp_num_threads] restoring os.environ['%s'] = '%s'" \
                 %(key, omp_dct['num_threads'])
             os.environ[key] = omp_dct['num_threads']
     elif action == 'set':
         print "[omp_num_threads] setting os.environ['%s'] = '%s'" %(key, str(num))
         os.environ[key] = str(num)
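
A hedged usage sketch (not taken from the pwtools sources; run_threaded_code() is a hypothetical placeholder) showing how the check/backup/set/restore actions are meant to be combined around a threaded section:

omp_num_threads(action='check')        # report the current setting, if any
omp_num_threads(action='backup')       # remember the current value in OMP_DCT
omp_num_threads(action='set', num=4)   # export OMP_NUM_THREADS=4
try:
    run_threaded_code()                # hypothetical threaded workload
finally:
    # restoring only makes sense if a value was backed up above
    omp_num_threads(action='restore')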
Example #3
def test_cp2k_scf():
    attr_lst = parse.Cp2kSCFOutputFile().attr_lst
    for base in ['cp2k.scf.out.print_low', 'cp2k.scf.out.print_medium']:
        fn = 'files/cp2k/scf/%s' %base
        print "testing: %s" %fn
        print common.backtick("gunzip %s.gz" %fn)
        st = io.read_cp2k_scf(fn)
        assert_attrs_not_none(st, attr_lst=attr_lst)
Example #4
def test_cp2k_md():
    attr_lst = parse.Cp2kMDOutputFile().attr_lst
    # This parser and others have get_econst(), but not all, so ATM it's not
    # part of the Trajectory API
    attr_lst.pop(attr_lst.index('econst'))
    for dr in ['files/cp2k/md/npt_f_print_low', 'files/cp2k/md/nvt_print_low']:
        base = os.path.dirname(dr) 
        fn = '%s/cp2k.out' %dr
        print "testing: %s" %fn
        print common.backtick('tar -C {0} -xzf {1}.tgz'.format(base,dr))
        tr = io.read_cp2k_md(fn)
        assert_attrs_not_none(tr, attr_lst=attr_lst)        
        pp = parse.Cp2kMDOutputFile(fn)
        forces_outfile = pp._get_forces_from_outfile()*Ha/Bohr/eV*Ang
        assert np.allclose(forces_outfile, tr.forces, rtol=1e-3)
Example #5
    def _fit(self):
        # volume[Bohr^3] etot[Ha] for eos.x
        volume = self.volume * (Ang**3.0 / Bohr**3.0)
        energy = self.energy * (eV / Ha)
        data = np.array([volume, energy]).T
        infn_txt =\
        """
%s
%i
%i
%f,  %f,  %i
%i
%s
        """%(self.name,
             self.natoms,
             self.etype,
             volume[0], volume[-1], self.npoints,
             len(volume),
             common.str_arr(data))
        common.file_write(self.infn, infn_txt)
        out = common.backtick('cd %s && %s' % (self.dir, self.app_basename))
        if self.verbose:
            print(out)
            print((open(os.path.join(self.dir, 'PARAM.OUT')).read()))
        # Remove normalization on natoms. See .../eos/output.f90:
        # fitev: [volume [Bohr^3] / natoms, energy [Ha] / natoms]
        # fitpv: [volume [Bohr^3] / natoms, pressure [GPa]]
        fitev = np.loadtxt(os.path.join(self.dir, 'EVPAI.OUT')) * self.natoms
        # convert energy back to [Ang^3, eV]
        fitev[:, 0] *= (Bohr**3 / Ang**3)
        fitev[:, 1] *= (Ha / eV)
        self.ev = fitev
        fitpv = np.loadtxt(os.path.join(self.dir, 'PVPAI.OUT'))
        fitpv[:, 0] *= (self.natoms * Bohr**3 / Ang**3)
        self.pv = fitpv
Example #6
File: eos.py, Project: elcorto/pwtools
    def _fit(self):
        # volume[Bohr^3] etot[Ha] for eos.x
        volume = self.volume*(Ang**3.0 / Bohr**3.0)
        energy = self.energy*(eV / Ha)
        data = np.array([volume, energy]).T
        infn_txt =\
        """
%s
%i
%i
%f,  %f,  %i
%i
%s
        """%(self.name, 
             self.natoms, 
             self.etype, 
             volume[0], volume[-1], self.npoints,
             len(volume), 
             common.str_arr(data))
        common.file_write(self.infn, infn_txt)
        out = common.backtick('cd %s && %s' %(self.dir, self.app_basename))
        if self.verbose:
            print(out)
            print(open(os.path.join(self.dir,'PARAM.OUT')).read())
        # Remove normalization on natoms. See .../eos/output.f90:
        # fitev: [volume [Bohr^3] / natoms, energy [Ha] / natoms]
        # fitpv: [volume [Bohr^3] / natoms, pressure [GPa]]
        fitev = np.loadtxt(os.path.join(self.dir,'EVPAI.OUT')) * self.natoms
        # convert energy back to [Ang^3, eV]
        fitev[:,0] *= (Bohr**3 / Ang**3)
        fitev[:,1] *= (Ha / eV)
        self.ev = fitev
        fitpv = np.loadtxt(os.path.join(self.dir,'PVPAI.OUT'))
        fitpv[:,0] *= (self.natoms * Bohr**3 / Ang**3)
        self.pv = fitpv
Example #7
def test_velocity_traj():
    # Test Trajectory.get_velocity() against velocities output from CP2K. The
    # agreement is very good. Works only for fixed-cell MDs, however!
    dr = 'files/cp2k/md/nvt_print_low'
    base = os.path.dirname(dr) 
    fn = '%s/cp2k.out' %dr
    print(common.backtick('tar -C {0} -xzf {1}.tgz'.format(base,dr)))
    tr = io.read_cp2k_md(fn)
    # read from data file
    v1 = tr.velocity.copy()
    # If tr.velocity != None, then get_velocity() doesn't calculate it. Then,
    # it simply returns tr.velocity, which is what we of course usually want.
    tr.velocity = None
    # calculate from coords + time step, b/c of central diffs, only steps 1:-1
    # are the same
    v2 = tr.get_velocity()
    print(">>>> np.abs(v1).max()", np.abs(v1).max())
    print(">>>> np.abs(v1).min()", np.abs(v1).min())
    print(">>>> np.abs(v2).max()", np.abs(v2).max())
    print(">>>> np.abs(v2).min()", np.abs(v2).min())
    print(">>>> np.abs(v1-v2).max()", np.abs(v1-v2).max())
    print(">>>> np.abs(v1-v2).min()", np.abs(v1-v2).min())
    assert np.allclose(v1[1:-1,...], v2[1:-1,...], atol=1e-4)
    
    ##from pwtools import mpl
    ##fig,ax = mpl.fig_ax()
    ##ax.plot(v1[1:-1,:,0], 'b')
    ##ax.plot(v2[1:-1,:,0], 'r')
    ##mpl.plt.show()
    
    shape = (100,10,3)
    arr = np.random.rand(*shape)
    assert crys.velocity_traj(arr, axis=0).shape == shape
    assert crys.velocity_traj(arr, axis=0, endpoints=False).shape == (98,10,3)
Example #8
 def omp_num_threads(action='check', num=1, omp_dct=OMP_DCT, err=False):
     key = 'OMP_NUM_THREADS'
     has_key = key in os.environ
     if action == 'check':
         if has_key:
             print("[omp_num_threads] os.environ['%s']: %s" %
                   (key, os.environ[key]))
             print("[omp_num_threads] shell$ echo %s" % (key))
             print(backtick('echo $%s' % key))
             if err and os.environ[key] != '3':
                 return 'err'
         else:
             print("[omp_num_threads] no os.environ['%s']" % key)
             if err:
                 return 'err'
     elif action == 'backup':
         if has_key:
             print("[omp_num_threads] backup os.environ['%s'] = '%s'" %
                   (key, os.environ[key]))
             omp_dct['num_threads'] = os.environ[key]
         else:
             omp_dct['num_threads'] = None
     elif action == 'restore':
         if has_key:
             print("[omp_num_threads] restoring os.environ['%s'] = '%s'" \
                 %(key, omp_dct['num_threads']))
             os.environ[key] = omp_dct['num_threads']
     elif action == 'set':
         print("[omp_num_threads] setting os.environ['%s'] = '%s'" %
               (key, str(num)))
         os.environ[key] = str(num)
Example #9
File: pwscf.py, Project: zari277/pwtools
def read_dynmat(path='.', natoms=None, filename='dynmat.out', axsf='dynmat.axsf'):
    """Read ``dynmat.x`` output.

    `freqs` are parsed from `filename` and `vecs` from `axsf`. `qpoints` is
    always Gamma, i.e. [0,0,0].

    Output format is the same as in :func:`read_dyn`.

    Parameters
    ----------
    path : str
        path where output files are
    natoms : int
    filename : str
        Text output from dynmat.x, where the frequencies are printed, relative
        to `path`.
    axsf : str
        AXSF file (``filxsf`` in input) with mode vectors as forces.
    
    Returns
    -------
    qpoints, freqs, vecs
    qpoints : 1d array (3,)
        The qpoint, which is Gamma, i.e. [0,0,0]
    freqs : 1d array, (nmodes,) where nmodes = 3*natoms
        3*natoms phonon frequencies in [cm^-1] at the q-point.
    vecs : 3d real array (nmodes, natoms, 3)
        Real parts (presumably) of the eigenvectors of the dynamical matrix for the
        q-point.
    
    Notes
    -----
    We assume the output to be generated with ``dynmat.x < dynmat.in >
    dynmat.out``. 
    """
    assert natoms is not None, ("natoms is None")
    nmodes = 3*natoms
    out_fn = pj(path, filename)
    axsf_fn = pj(path, axsf)
    cmd = "grep -A{0} PRIMCO {1} | sed -re '/PRIMCO.*/{{N;d;}}' | \
            awk '{{print $5\" \"$6\" \"$7}}'".format(natoms+1, axsf_fn)
    qpoints = np.zeros((3,))
    vecs = np.fromstring(common.backtick(cmd), sep=' ').reshape(nmodes,natoms,3)
    cmd = "grep -A{0} 'mode.*cm-1' {1} | grep -v mode | \
           awk '{{print $2}}'".format(nmodes, out_fn)
    freqs = np.fromstring(common.backtick(cmd), sep=' ')
    return qpoints,freqs,vecs
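
A short usage sketch, assuming a finished dynmat.x run in a directory 'ph_out' with the default file names and an 8-atom cell (the path and atom count are illustrative, not from the original sources):

# Hypothetical call; 'ph_out' and natoms=8 are assumptions for illustration.
qpoints, freqs, vecs = read_dynmat(path='ph_out', natoms=8)
assert qpoints.shape == (3,)         # the Gamma point only
assert freqs.shape == (24,)          # 3*natoms frequencies in cm^-1
assert vecs.shape == (24, 8, 3)      # one real vector per mode and atom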
Example #10
File: tools.py, Project: elcorto/pwtools
def unpack_compressed(src, prefix='tmp', testdir=testdir, ext=None):
    """Convenience function to uncompress files/some_file.out.gz into a random
    location. Return the filename "path/to/random_location/some_file.out"
    without ".gz", which can be used in subsequent commands.
    
    Supported file types: gz, tgz, tar.gz
        gunzip path/to/random_location/some_file.out.gz
        tar -C path/to/random_location -xzf path/to/random_location/some_file.out.tgz
    
    Other compress formats may be implemented as needed.
    
    Can also be used for slightly more complex unpack business, see for
    example test_cpmd_md.py. 

    Parameters
    ----------
    src : str
        path to the compressed file, e.g. files/some_file.out.gz
    prefix : str, optional
        prefix for mkdtemp(), usually __file__ of the test script
        to identify which test script created the random dir
    testdir : str, optional
        'path/to' in the example above, usually
        ``pwtools.test.testenv.testdir``
    ext : str, optional
        file extension of compressed file ('gz', 'tgz', 'tar.gz'), if None then
        it will be guessed from `src`
    """
    # 'path/to/random_location'
    workdir = tempfile.mkdtemp(dir=testdir, prefix=prefix)
    # 'gz'
    ext = src.split('.')[-1] if ext is None else ext
    # 'some_file.out'
    base = os.path.basename(src).replace('.'+ext, '')
    # path/to/random_location/some_file.out
    filename = '{workdir}/{base}'.format(workdir=workdir, base=base)
    cmd = "mkdir -p {workdir}; cp {src} {workdir}/; "
    if ext == 'gz':
        cmd += "gunzip {filename}.{ext};"
    elif ext in ['tgz', 'tar.gz']:
        cmd += "tar -C {workdir} -xzf {filename}.{ext};"
    else:
        raise StandardError("unsuported file format of file: {}".format(src))
    cmd = cmd.format(workdir=workdir, src=src, filename=filename, ext=ext)
    print(common.backtick(cmd))
    assert os.path.exists(filename), "unpack failed: '%s' not found" %filename
    return filename
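
For illustration, a hedged usage sketch inside a test; the gzipped CP2K output used here is the one from the SCF examples above, and the prefix is arbitrary:

# Assumed example: unpack a gzipped reference output, then parse it.
fn = unpack_compressed('files/cp2k/scf/cp2k.scf.out.print_low.gz',
                       prefix='test_cp2k_scf')
st = io.read_cp2k_scf(fn)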
Example #11
File: pwscf.py, Project: elcorto/pwtools
def read_dynmat(path='.', natoms=None, filename='dynmat.out', axsf='dynmat.axsf'):
    """Read ``dynmat.x`` output.

    `freqs` are parsed from `filename` and `vecs` from `axsf`. `qpoints` is
    always Gamma, i.e. [0,0,0].

    Output format is the same as in :func:`read_dyn`.

    Parameters
    ----------
    path : str
        path where output files are
    natoms : int
    filename : str
        Text output from dynmat.x, where the frequencies are printed, relative
        to `path`.
    axsf : str
        AXSF file (``filxsf`` in input) with mode vectors as forces.
    
    Returns
    -------
    qpoints, freqs, vecs
    qpoints : 1d array (3,)
        The qpoint, which is Gamma, i.e. [0,0,0]
    freqs : 1d array, (nmodes,) where nmodes = 3*natoms
        3*natoms phonon frequencies in [cm^-1] at the q-point.
    vecs : 3d real array (nmodes, natoms, 3)
        Real parts (presumably) of the eigenvectors of the dynamical matrix for the
        q-point.
    
    Notes
    -----
    We assume the output to be generated with ``dynmat.x < dynmat.in >
    dynmat.out``. 
    """
    assert natoms is not None, ("natoms is None")
    nmodes = 3*natoms
    out_fn = pj(path, filename)
    axsf_fn = pj(path, axsf)
    cmd = "grep -A{0} PRIMCO {1} | sed -re '/PRIMCO.*/{{N;d;}}' | \
            awk '{{print $5\" \"$6\" \"$7}}'".format(natoms+1, axsf_fn)
    qpoints = np.zeros((3,))
    vecs = np.fromstring(common.backtick(cmd), sep=' ').reshape(nmodes,natoms,3)
    cmd = "grep -A{0} 'mode.*cm-1' {1} | grep -v mode | \
           awk '{{print $2}}'".format(nmodes, out_fn)
    freqs = np.fromstring(common.backtick(cmd), sep=' ')
    return qpoints,freqs,vecs
Example #12
def test_cp2k_scf():
    attr_lst = parse.Cp2kSCFOutputFile().attr_lst
    for base in ['cp2k.scf.out.print_low', 'cp2k.scf.out.print_medium']:
        fn = 'files/cp2k/scf/%s' % base
        print("testing: %s" % fn)
        print(common.backtick("gunzip %s.gz" % fn))
        st = io.read_cp2k_scf(fn)
        assert_attrs_not_none(st, attr_lst=attr_lst)
Example #13
File: pwscf.py, Project: zari277/pwtools
def read_dynmat_ir_raman(filename='dynmat.out', natoms=None, 
                         cols={1: 'freqs', 3:'ir', 4: 'raman', 5: 'depol'}):
    """Read ``dynmat.x`` text output file and extract IR and Raman
    intensities.
    
    Parameters
    ----------
    filename : str
        dynmat.x text output file (e.g. from ``dynmat.x < dynmat.in >
        dynmat.out``)
    natoms : int
        number of atoms in the cell
    cols : dict
        column numbers of the text block

    Returns
    -------
    cols = None 
        Return the parsed array as found in the file
    cols = dict 
        Return dict with keys from `cols` and 1d arrays ``{'freqs': <array>,
        'ir': <array>, 'raman': <array>, 'depol': <array>}``. If a column is
        not present, the array is None.
 
    Notes
    -----
    The parsed textblock looks like this::

        # mode   [cm-1]    [THz]      IR          Raman   depol.fact
            1      0.00    0.0000    0.0000         0.0005    0.7414
            2      0.00    0.0000    0.0000         0.0005    0.7465
            3      0.00    0.0000    0.0000         0.0018    0.2647
            4    252.27    7.5627    0.0000         0.0073    0.7500
            5    252.27    7.5627    0.0000         0.0073    0.7500
            6    548.44   16.4419    0.0000         0.0000    0.7434
            7    603.32   18.0872   35.9045        18.9075    0.7366
            8    656.82   19.6910    0.0000         7.9317    0.7500
            9    656.82   19.6910    0.0000         7.9317    0.7500
           10    669.67   20.0762   31.5712         5.0265    0.7500
           11    738.22   22.1311    0.0000         0.0000    0.7306
           12    922.64   27.6600   31.5712         5.0265    0.7500
    
    Some columns (e.g. IR, Raman) may be missing.
    """                    
    assert natoms is not None, ("natoms is None")
    cmd = "grep -A{0} 'mode.*cm-1' {1} | grep -v mode".format(3*natoms, filename)
    arr = parse.arr2d_from_txt(common.backtick(cmd))
    if cols is None:
        return arr
    else:
        dct = {}
        for ii,name in cols.items():
            if arr.shape[1] >= (ii+1):
                dct[name] = arr[:,ii]
            else:
                dct[name] = None
        return dct
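
A usage sketch under assumed inputs (the file name and atom count are illustrative):

# Hypothetical: pull IR and Raman intensities from a dynmat.x text output.
res = read_dynmat_ir_raman(filename='dynmat.out', natoms=8)
freqs = res['freqs']      # 1d array of length 3*natoms
ir = res['ir']            # None if the IR column is missing in the file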
Example #14
File: pwscf.py, Project: elcorto/pwtools
def read_dynmat_ir_raman(filename='dynmat.out', natoms=None, 
                         cols={1: 'freqs', 3:'ir', 4: 'raman', 5: 'depol'}):
    """Read ``dynmat.x`` text output file and extract IR and Raman
    intensities.
    
    Parameters
    ----------
    filename : str
        dynmat.x text output file (e.g. from ``dynmat.x < dynmat.in >
        dynmat.out``)
    natoms : int
        number of atoms in the cell
    cols : dict
        column numbers of the text block

    Returns
    -------
    cols = None 
        Return the parsed array as found in the file
    cols = dict 
        Return dict with keys from `cols` and 1d arrays ``{'freqs': <array>,
        'ir': <array>, 'raman': <array>, 'depol': <array>}``. If a column is
        not present, the array is None.
 
    Notes
    -----
    The parsed textblock looks like this::

        # mode   [cm-1]    [THz]      IR          Raman   depol.fact
            1      0.00    0.0000    0.0000         0.0005    0.7414
            2      0.00    0.0000    0.0000         0.0005    0.7465
            3      0.00    0.0000    0.0000         0.0018    0.2647
            4    252.27    7.5627    0.0000         0.0073    0.7500
            5    252.27    7.5627    0.0000         0.0073    0.7500
            6    548.44   16.4419    0.0000         0.0000    0.7434
            7    603.32   18.0872   35.9045        18.9075    0.7366
            8    656.82   19.6910    0.0000         7.9317    0.7500
            9    656.82   19.6910    0.0000         7.9317    0.7500
           10    669.67   20.0762   31.5712         5.0265    0.7500
           11    738.22   22.1311    0.0000         0.0000    0.7306
           12    922.64   27.6600   31.5712         5.0265    0.7500
    
    Some columns (e.g. IR, Raman) may be missing.
    """                    
    assert natoms is not None, ("natoms is None")
    cmd = "grep -A{0} 'mode.*cm-1' {1} | grep -v mode".format(3*natoms, filename)
    arr = parse.arr2d_from_txt(common.backtick(cmd))
    if cols is None:
        return arr
    else:
        dct = {}
        for ii,name in cols.items():
            if arr.shape[1] >= (ii+1):
                dct[name] = arr[:,ii]
            else:
                dct[name] = None
        return dct
Example #15
def test_pwscf_calculator():
    if not have_ase():
        skip("no ASE found, skipping test")
    elif not have_pwx():
        skip("no pw.x found, skipping test")
    else:
        pseudo_dir = pj(testdir, prefix, 'pseudo')
        print common.backtick("mkdir -pv {p}; cp files/qe_pseudos/*.gz {p}/; \
            gunzip {p}/*".format(p=pseudo_dir))
        at = get_atoms_with_calc_pwscf(pseudo_dir)

        print "scf"
        # trigger calculation here
        forces = at.get_forces()
        etot = at.get_potential_energy()
        stress = at.get_stress(voigt=False) # 3x3
        
        st = io.read_pw_scf(at.calc.label + '.out')
        assert np.allclose(forces, st.forces)
        assert np.allclose(etot, st.etot)
        assert np.allclose(st.stress, -stress * constants.eV_by_Ang3_to_GPa)
        
        # files/ase/pw.scf.out.start is a norm-conserving LDA struct,
        # calculated with pz-vbc.UPF, so the PBE vc-relax will make the cell
        # a bit bigger
        print "vc-relax"
        from ase.optimize import BFGS
        from ase.constraints import UnitCellFilter
        opt = BFGS(UnitCellFilter(at))
        cell = parse.arr2d_from_txt("""
            -1.97281509  0.          1.97281509
             0.          1.97281509  1.97281509
            -1.97281509  1.97281509  0.""")        
        assert np.allclose(cell, at.get_cell())
        opt.run(fmax=0.05) # run only 2 steps
        cell = parse.arr2d_from_txt("""
            -2.01837531  0.          2.01837531
             0.          2.01837531  2.01837531
            -2.01837531  2.01837531  0""")
        assert np.allclose(cell, at.get_cell())

        # at least one backup file must exist: pw.*.0 is the SCF run, backed up
        # in the first iter of the vc-relax
        assert os.path.exists(at.calc.infile + '.0')
Example #16
File: pwscf.py, Project: zari277/pwtools
def read_dyn(filename, natoms=None):
    """Read one dynamical matrix file (for 1 qpoint) produced by ``ph.x`` and
    extract the same as :func:`read_matdyn_modes` for this qpoint only. 
    
    All arrays have one dim less compared to :func:`read_matdyn_modes`.
    
    Parameters
    ----------
    filename : str
        Name of dyn file. Example: "ph.dyn3" for qpoint 3.
    natoms : int
        number of atoms in the cell (used for nmodes=3*natoms only)
    
    Returns
    -------
    qpoints, freqs, vecs
    qpoints : 1d array (3,)
        The qpoint of the dyn file.
    freqs : 1d array, (nmodes,) where nmodes = 3*natoms
        3*natoms phonon frequencies in [cm^-1] at the q-point.
    vecs : 3d complex array (nmodes, natoms, 3)
        Complex eigenvectors of the dynamical matrix for the q-point.
    """
    assert natoms is not None
    cmd = r"egrep 'q.*=.*\(' %s | tail -n1 | sed -re 's/.*q\s*=.*\((.*)\)/\1/'" %filename
    qpoints = np.fromstring(common.backtick(cmd), sep=' ')
    assert qpoints.shape == (3,)
    nmodes = 3*natoms
    cmd = r"grep -v 'q.*=' %s | grep '^[ ]*(' | sed -re 's/^\s*\((.*)\)/\1/g'" %filename
    # vecs_file_flat: (nmodes * natoms, 6)
    # this line is the bottleneck
    vecs_file_flat = parse.arr2d_from_txt(common.backtick(cmd))
    vecs_flat = np.empty((vecs_file_flat.shape[0], 3), dtype=complex)
    vecs_flat[:,0] = vecs_file_flat[:,0] + 1j*vecs_file_flat[:,1]
    vecs_flat[:,1] = vecs_file_flat[:,2] + 1j*vecs_file_flat[:,3]
    vecs_flat[:,2] = vecs_file_flat[:,4] + 1j*vecs_file_flat[:,5]
    vecs = vecs_flat.flatten().reshape(nmodes, natoms, 3)
    cmd = r"grep omega %s | sed -re \
            's/.*omega.*=.*\[.*=(.*)\s*\[.*/\1/g'" %filename
    freqs = np.fromstring(common.backtick(cmd), sep=' ')
    return qpoints, freqs, vecs
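
As a hedged sketch, reading the dynamical matrix file of q-point 3 written by ph.x for an assumed 4-atom cell (the file name follows the docstring example; natoms=4 is illustrative):

# Hypothetical call; natoms=4 is an assumption for illustration.
qpoint, freqs, vecs = read_dyn('ph.dyn3', natoms=4)
assert qpoint.shape == (3,)
assert freqs.shape == (12,)          # 3*natoms
assert vecs.shape == (12, 4, 3)      # complex eigenvectors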
Example #17
File: pwscf.py, Project: elcorto/pwtools
def read_dyn(filename, natoms=None):
    """Read one dynamical matrix file (for 1 qpoint) produced by ``ph.x`` and
    extract the same as :func:`read_matdyn_modes` for this qpoint only. 
    
    All arrays have one dim less compared to :func:`read_matdyn_modes`.
    
    Parameters
    ----------
    filename : str
        Name of dyn file. Example: "ph.dyn3" for qpoint 3.
    natoms : int
        number of atoms in the cell (used for nmodes=3*natoms only)
    
    Returns
    -------
    qpoints, freqs, vecs
    qpoints : 1d array (3,)
        The qpoint of the dyn file.
    freqs : 1d array, (nmodes,) where nmodes = 3*natoms
        3*natoms phonon frequencies in [cm^-1] at the q-point.
    vecs : 3d complex array (nmodes, natoms, 3)
        Complex eigenvectors of the dynamical matrix for the q-point.
    """
    assert natoms is not None
    cmd = r"egrep 'q.*=.*\(' %s | tail -n1 | sed -re 's/.*q\s*=.*\((.*)\)/\1/'" %filename
    qpoints = np.fromstring(common.backtick(cmd), sep=' ')
    assert qpoints.shape == (3,)
    nmodes = 3*natoms
    cmd = r"grep -v 'q.*=' %s | grep '^[ ]*(' | sed -re 's/^\s*\((.*)\)/\1/g'" %filename
    # vecs_file_flat: (nmodes * natoms, 6)
    # this line is the bottleneck
    vecs_file_flat = parse.arr2d_from_txt(common.backtick(cmd))
    vecs_flat = np.empty((vecs_file_flat.shape[0], 3), dtype=complex)
    vecs_flat[:,0] = vecs_file_flat[:,0] + 1j*vecs_file_flat[:,1]
    vecs_flat[:,1] = vecs_file_flat[:,2] + 1j*vecs_file_flat[:,3]
    vecs_flat[:,2] = vecs_file_flat[:,4] + 1j*vecs_file_flat[:,5]
    vecs = vecs_flat.flatten().reshape(nmodes, natoms, 3)
    cmd = r"grep omega %s | sed -re \
            's/.*omega.*=.*\[.*=(.*)\s*\[.*/\1/g'" %filename
    freqs = np.fromstring(common.backtick(cmd), sep=' ')
    return qpoints, freqs, vecs
Example #18
def test_lammps_calculator():
    if not have_ase():
        skip("no ASE found, skipping test")
    elif not have_lmp():
        skip("no lammps found, skipping test")
    else:
        at = get_atoms_with_calc_lammps()
        at.rattle(stdev=0.001, seed=int(time.time()))
        common.makedirs(at.calc.directory)
        print common.backtick("cp -v utils/lammps/AlN.tersoff {p}/".format(
            p=at.calc.directory))

        print "scf"
        forces = at.get_forces()
        etot = at.get_potential_energy()
        stress = at.get_stress(voigt=False) # 3x3
        
        st = io.read_lammps_md_txt(at.calc.label + '.out')[0]
        assert np.allclose(forces, st.forces)
        assert np.allclose(etot, st.etot)
        assert np.allclose(st.stress, -stress * constants.eV_by_Ang3_to_GPa,
                           atol=1e-10)
        
        print "relax"
        from ase.optimize import BFGS
        opt = BFGS(at, maxstep=0.04)
        opt.run(fmax=0.001, steps=10)
        coords_frac = parse.arr2d_from_txt("""
            3.3333341909920072e-01    6.6666683819841532e-01    4.4325467247779138e-03
            6.6666681184103216e-01    3.3333362368205072e-01    5.0443254824788963e-01
            3.3333341909918301e-01    6.6666683819838046e-01    3.8356759709402671e-01
            6.6666681184101539e-01    3.3333362368201563e-01    8.8356759861713752e-01
            """)
        assert np.allclose(coords_frac, at.get_scaled_positions(), atol=1e-2)

        # at least one backup file must exist
        assert os.path.exists(at.calc.infile + '.0')
        assert os.path.exists(at.calc.outfile + '.0')
        assert os.path.exists(at.calc.dumpfile + '.0')
        assert os.path.exists(at.calc.structfile + '.0')
Example #19
def test_cp2k_cell_opt():
    attr_lst = parse.Cp2kRelaxOutputFile().attr_lst
    attr_lst.pop(attr_lst.index('econst'))
    # There is no PROJECT-frc-1.xyz file, but the input file has 
    #    &force_eval
    #        &print
    #            &forces
    #            &end forces
    #        &end print
    #    &end force_eval
    # therefore we can parse forces from the outfile.
    none_attrs = ['ekin',
                  'temperature',
                  'timestep',
                  'velocity',
                  ]
    for dr in ['files/cp2k/cell_opt/cell_opt']:
        base = os.path.dirname(dr) 
        fn = '%s/cp2k.out' %dr
        print "testing: %s" %fn
        print common.backtick('tar -C {0} -xzf {1}.tgz'.format(base,dr))
        tr = io.read_cp2k_relax(fn)
        assert_attrs_not_none(tr, attr_lst=attr_lst, none_attrs=none_attrs)        
Example #20
def test_cp2k_md():
    attr_lst = parse.Cp2kMDOutputFile().attr_lst
    # This parser and others have get_econst(), but not all, so ATM it's not
    # part of the Trajectory API
    attr_lst.pop(attr_lst.index('econst'))
    for dr in ['files/cp2k/md/npt_f_print_low', 'files/cp2k/md/nvt_print_low']:
        base = os.path.dirname(dr)
        fn = '%s/cp2k.out' % dr
        print("testing: %s" % fn)
        print(common.backtick('tar -C {0} -xzf {1}.tgz'.format(base, dr)))
        tr = io.read_cp2k_md(fn)
        assert_attrs_not_none(tr, attr_lst=attr_lst)
        pp = parse.Cp2kMDOutputFile(fn)
        forces_outfile = pp._get_forces_from_outfile() * Ha / Bohr / eV * Ang
        assert np.allclose(forces_outfile, tr.forces, rtol=1e-3)
Example #21
def test_write_lammps():
    st = crys.Structure(coords_frac=rand(20, 3),
                        cell=rand(3, 3),
                        symbols=['Al'] * 10 + ['N'] * 10)
    # align cell to lammps standard [[x,0,0],...]
    st.coords = None
    st.cell = None
    st.set_all()
    st_fn = common.pj(testdir, 'lmp.struct')
    io.write_lammps(st_fn, st)
    symbols = common.file_read(st_fn + '.symbols').split()
    assert st.symbols == symbols
    cmd = r"grep -A22 Atoms %s | grep '^[0-9]'" % st_fn
    arr = parse.arr2d_from_txt(common.backtick(cmd))
    assert arr.shape == (20, 5)
    assert np.allclose(st.coords, arr[:, 2:])
Example #22
def test_write_lammps():
    st = crys.Structure(coords_frac=rand(20,3),
                        cell=rand(3,3),
                        symbols=['Al']*10+['N']*10)
    # align cell to lammps standard [[x,0,0],...]
    st.coords = None  
    st.cell = None
    st.set_all()
    st_fn = common.pj(testdir, 'lmp.struct')
    io.write_lammps(st_fn, st)
    symbols = common.file_read(st_fn + '.symbols').split()
    assert st.symbols == symbols
    cmd = r"grep -A22 Atoms %s | grep '^[0-9]'" %st_fn
    arr = parse.arr2d_from_txt(common.backtick(cmd))
    assert arr.shape == (20,5)
    assert np.allclose(st.coords, arr[:,2:])
Example #23
def unpack_compressed(src, prefix='tmp', testdir=testdir, ext=None):
    """Convenience function to uncompress files/some_file.out.gz into a random
    location. Return the filename "path/to/random_location/some_file.out"
    without ".gz", which can be used in subsequent commands.
    
    Supported file types: gz, tgz, tar.gz
        gunzip path/to/random_location/some_file.out.gz
        tar -C path/to/random_location -xzf path/to/random_location/some_file.out.tgz
    
    Other compress formats may be implemented as needed.
    
    Can also be used for slightly more complex unpack business, see for
    example test_cpmd_md.py. 

    Parameters
    ----------
    src : str
        path to the compressed file, e.g. files/some_file.out.gz
    prefix : str, optional
        prefix for mkdtemp(), usually __file__ of the test script
        to identify which test script created the random dir
    testdir : str, optional
        'path/to' in the example above, usually
        ``pwtools.test.testenv.testdir``
    ext : str, optional
        file extension of compressed file ('gz', 'tgz', 'tar.gz'), if None then
        it will be guessed from `src`
    """
    # 'path/to/random_location'
    workdir = tempfile.mkdtemp(dir=testdir, prefix=prefix)
    # 'gz'
    ext = src.split('.')[-1] if ext is None else ext
    # 'some_file.out'
    base = os.path.basename(src).replace('.' + ext, '')
    # path/to/random_location/some_file.out
    filename = '{workdir}/{base}'.format(workdir=workdir, base=base)
    cmd = "mkdir -p {workdir}; cp {src} {workdir}/; "
    if ext == 'gz':
        cmd += "gunzip {filename}.{ext};"
    elif ext in ['tgz', 'tar.gz']:
        cmd += "tar -C {workdir} -xzf {filename}.{ext};"
    else:
        raise Exception("unsuported file format of file: {}".format(src))
    cmd = cmd.format(workdir=workdir, src=src, filename=filename, ext=ext)
    print(common.backtick(cmd))
    assert os.path.exists(filename), "unpack failed: '%s' not found" % filename
    return filename
Example #24
def test_pwscf_calculator():
    if not have_ase():
        skip("no ASE found, skipping test")
    elif not have_pwx():
        skip("no pw.x found, skipping test")
    else:
        pseudo_dir = pj(testdir, prefix, 'pseudo')
        print(common.backtick("mkdir -pv {p}; cp files/qe_pseudos/*.gz {p}/; \
            gunzip {p}/*".format(p=pseudo_dir)))
        at = get_atoms_with_calc_pwscf(pseudo_dir)

        print("scf")
        # trigger calculation here
        forces = at.get_forces()
        etot = at.get_potential_energy()
        stress = at.get_stress(voigt=False) # 3x3
        
        st = io.read_pw_scf(at.calc.label + '.out')
        assert np.allclose(forces, st.forces)
        assert np.allclose(etot, st.etot)
        assert np.allclose(st.stress, -stress * constants.eV_by_Ang3_to_GPa)
        
        # files/ase/pw.scf.out.start is a norm-conserving LDA struct,
        # calculated with pz-vbc.UPF, so the PBE vc-relax will make the cell
        # a bit bigger
        print("vc-relax")
        from ase.optimize import BFGS
        from ase.constraints import UnitCellFilter
        opt = BFGS(UnitCellFilter(at))
        cell = parse.arr2d_from_txt("""
            -1.97281509  0.          1.97281509
             0.          1.97281509  1.97281509
            -1.97281509  1.97281509  0.""")        
        assert np.allclose(cell, at.get_cell())
        opt.run(fmax=0.05) # run only 2 steps
        cell = parse.arr2d_from_txt("""
            -2.01837531  0.          2.01837531
             0.          2.01837531  2.01837531
            -2.01837531  2.01837531  0""")
        assert np.allclose(cell, at.get_cell())

        # at least one backup file must exist: pw.*.0 is the SCF run, backed up
        # in the first iter of the vc-relax
        assert os.path.exists(at.calc.infile + '.0')
Example #25
def test_lammps_calculator():
    if not have_ase():
        skip("no ASE found, skipping test")
    elif not have_lmp():
        skip("no lammps found, skipping test")
    else:
        at = get_atoms_with_calc_lammps()
        at.rattle(stdev=0.001, seed=int(time.time()))
        common.makedirs(at.calc.directory)
        print(common.backtick("cp -v utils/lammps/AlN.tersoff {p}/".format(
            p=at.calc.directory)))

        print("scf")
        forces = at.get_forces()
        etot = at.get_potential_energy()
        stress = at.get_stress(voigt=False) # 3x3
        
        st = io.read_lammps_md_txt(at.calc.label + '.out')[0]
        assert np.allclose(forces, st.forces)
        assert np.allclose(etot, st.etot)
        assert np.allclose(st.stress, -stress * constants.eV_by_Ang3_to_GPa,
                           atol=1e-10)
        
        print("relax")
        from ase.optimize import BFGS
        opt = BFGS(at, maxstep=0.04)
        opt.run(fmax=0.001, steps=10)
        coords_frac = parse.arr2d_from_txt("""
            3.3333341909920072e-01    6.6666683819841532e-01    4.4325467247779138e-03
            6.6666681184103216e-01    3.3333362368205072e-01    5.0443254824788963e-01
            3.3333341909918301e-01    6.6666683819838046e-01    3.8356759709402671e-01
            6.6666681184101539e-01    3.3333362368201563e-01    8.8356759861713752e-01
            """)
        assert np.allclose(coords_frac, at.get_scaled_positions(), atol=1e-2)

        # at least one backup file must exist
        assert os.path.exists(at.calc.infile + '.0')
        assert os.path.exists(at.calc.outfile + '.0')
        assert os.path.exists(at.calc.dumpfile + '.0')
        assert os.path.exists(at.calc.structfile + '.0')
Example #26
def test_cp2k_cell_opt():
    attr_lst = parse.Cp2kRelaxOutputFile().attr_lst
    attr_lst.pop(attr_lst.index('econst'))
    # There is no PROJECT-frc-1.xyz file, but the input file has
    #    &force_eval
    #        &print
    #            &forces
    #            &end forces
    #        &end print
    #    &end force_eval
    # therefore we can parse forces from the outfile.
    none_attrs = [
        'ekin',
        'temperature',
        'timestep',
        'velocity',
    ]
    for dr in ['files/cp2k/cell_opt/cell_opt']:
        base = os.path.dirname(dr)
        fn = '%s/cp2k.out' % dr
        print("testing: %s" % fn)
        print(common.backtick('tar -C {0} -xzf {1}.tgz'.format(base, dr)))
        tr = io.read_cp2k_relax(fn)
        assert_attrs_not_none(tr, attr_lst=attr_lst, none_attrs=none_attrs)
Example #27
#!/usr/bin/env python3

# Use inside a hg repo to plot a graph showing the number of changesets vs.
# tags.
#
# usage:
#   cd /path/to/repo
#   ./<this_script>.py

from io import StringIO
from matplotlib import pyplot as plt
import numpy as np
from pwtools import common, mpl

st = common.backtick("hg tags | sed -re 's/^(.*)\s+([0-9]+):.*$/\\1 \\2/'")
data = np.loadtxt(StringIO(st), dtype=str)
tags = data[:,0][::-1]
commits = data[:,1].astype(int)[::-1]

fig, ax = mpl.fig_ax()
xx = range(len(tags))
ax.plot(xx, commits, '.-')
ax.set_xticks(xx)
ax.set_xticklabels(tags, rotation='vertical')
plt.show()
Example #28
def test_absolute_signal():
    # std lib signal
    from pwtools.common import signal
    assert not hasattr(signal, 'fftsample')
    from pwtools import common
    print(common.backtick('ls'))
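
All of these examples lean on the same helper: common.backtick(cmd) runs a shell command and returns its captured stdout as a string. A minimal stand-in with the same observable behavior (an assumption about the actual pwtools implementation, which may differ in details such as error handling) would be:

import subprocess

def backtick_sketch(cmd):
    # Run `cmd` through a shell and return its stdout as text, which is
    # how common.backtick() is used throughout these examples.
    return subprocess.run(cmd, shell=True, capture_output=True,
                          text=True, check=True).stdout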
Example #29
def test_write_mol():
    units={'forces': Ha / eV}
    nstep = 2
    cell2d = np.random.rand(3,3)
    cell3d = np.random.rand(nstep,3,3)
    # fractional
    coords2d_frac = np.array([[0.5, 0.5, 0.5],
                              [1,1,1]])
    # fractional, 2 time steps: (2,2,3) = (nstep, natoms, 3)
    coords3d_frac = np.array([coords2d_frac, coords2d_frac*0.8])
    # cartesian = coords3d_frac + cell2d (fixed cell). For variable cell cases
    # below, cell3d is used!
    coords3d_cart = crys.coord_trans(coords3d_frac, 
                                     old=cell2d, 
                                     new=np.identity(3),
                                     axis=-1)
    coords2d_cart = coords3d_cart[0,...]
    symbols = ['H']*2
    forces2d = np.random.random(coords2d_frac.shape) 
    forces3d = np.random.random(coords3d_frac.shape)

    # --- AXSF ---------------------------------------------------------------
    # fixed cell, forces=0
    axsf_fn = pj(testdir, 'foo.axsf')
    io.write_axsf(axsf_fn, 
                  Trajectory(units=units,coords_frac=coords3d_frac, 
                             cell=cell2d,
                             symbols=symbols),
                 )                                    
    arr = np.loadtxt(StringIO(
            common.backtick("grep -A3 PRIMVEC %s | egrep -v -e '--|PRIMVEC'" %axsf_fn)))
    np.testing.assert_array_almost_equal(arr, np.concatenate((cell2d, cell2d), axis=0))

    arr = np.loadtxt(StringIO(
            common.backtick("sed -nre 's/^H(.*)/\\1/gp' %s" %axsf_fn)))
    arr2 = np.vstack((coords3d_cart[0,...],coords3d_cart[1,...]))
    np.testing.assert_array_almost_equal(arr, arr2)
    
    # fixed cell, forces3d, coords_frac
    axsf_fn = pj(testdir, 'foo3.axsf')
    io.write_axsf(axsf_fn, 
                  Trajectory(units=units,coords_frac=coords3d_frac, 
                             cell=cell2d,
                             symbols=symbols,
                             forces=forces3d),
                 )                             
    arr = np.loadtxt(StringIO(
            common.backtick("sed -nre 's/^H(.*)/\\1/gp' %s" %axsf_fn)))
    t0 = np.concatenate((coords3d_cart[0,...], forces3d[0,...]), axis=-1)
    t1 = np.concatenate((coords3d_cart[1,...], forces3d[1,...]), axis=-1)
    arr2 = np.vstack((t0,t1))
    print(arr)
    print(arr2)
    print("----------------")
    np.testing.assert_array_almost_equal(arr, arr2)
    
    # variable cell, forces3d, coords_frac
    axsf_fn = pj(testdir, 'foo4.axsf')
    io.write_axsf(axsf_fn, 
                  Trajectory(units=units,coords_frac=coords3d_frac, 
                             cell=cell3d,
                             symbols=symbols,
                             forces=forces3d))
    arr = np.loadtxt(StringIO(
            common.backtick("grep -A3 PRIMVEC %s | grep -v -e '--' -e 'PRIMVEC'" %axsf_fn)))
    arr2 = np.vstack((cell3d[0,...], cell3d[1,...]))           
    print(arr)
    print(arr2)
    print("----------------")
    np.testing.assert_array_almost_equal(arr, arr2)
    arr = np.loadtxt(StringIO(
            common.backtick("sed -nre 's/^H(.*)/\\1/gp' %s" %axsf_fn)))
    t0 = np.concatenate((np.dot(coords3d_frac[0,...], cell3d[0,...]), 
                         forces3d[0,...]), axis=-1)
    t1 = np.concatenate((np.dot(coords3d_frac[1,...], cell3d[1,...]), 
                         forces3d[1,...]), axis=-1)
    arr2 = np.vstack((t0,t1))
    print(arr)
    print(arr2)
    print("----------------")
    np.testing.assert_array_almost_equal(arr, arr2)
    
    # single struct, coords_cart
    axsf_fn = pj(testdir, 'foo6.axsf')
    io.write_axsf(axsf_fn, 
                  Structure(units=units,coords=coords2d_cart, 
                            cell=cell2d,
                            symbols=symbols,
                            forces=forces2d))
    arr = np.loadtxt(StringIO(
            common.backtick("grep -A3 PRIMVEC %s | grep -v -e '--' -e 'PRIMVEC'" %axsf_fn)))
    arr2 = cell2d           
    print(arr)
    print(arr2)
    print("----------------")
    np.testing.assert_array_almost_equal(arr, arr2)
    arr = np.loadtxt(StringIO(
            common.backtick("sed -nre 's/^H(.*)/\\1/gp' %s" %axsf_fn)))
    arr2 = np.concatenate((coords2d_cart, forces2d), axis=1)
    print(arr)
    print(arr2)
    print("----------------")
    np.testing.assert_array_almost_equal(arr, arr2)
    

    # --- XYZ ----------------------------------------------------------------
    # Use cell, coords, etc from above

    # input: coords_frac
    xyz_fn = pj(testdir, 'foo_frac_input.xyz')
    io.write_xyz(xyz_fn, 
                 Trajectory(units=units,coords_frac=coords3d_frac, 
                            cell=cell2d,
                            symbols=symbols),
                 name='foo') 
    arr = np.loadtxt(StringIO(
            common.backtick("sed -nre 's/^H(.*)/\\1/gp' %s" %xyz_fn)))
    arr2 = np.concatenate([coords3d_cart[0,...], coords3d_cart[1,...]], axis=0)
    np.testing.assert_array_almost_equal(arr, arr2)

    # input: coords_cart, cell=None
    xyz_fn = pj(testdir, 'foo_cart_input.xyz')
    io.write_xyz(xyz_fn, 
                 Trajectory(units=units,coords=coords3d_cart, 
                            symbols=symbols),
                 name='foo') 
    arr = np.loadtxt(StringIO(
            common.backtick("sed -nre 's/^H(.*)/\\1/gp' %s" %xyz_fn)))
    arr2 = np.concatenate((coords3d_cart[0,...], coords3d_cart[1,...]), axis=0)
    np.testing.assert_array_almost_equal(arr, arr2)

    # input: coords2d_frac, cell=cell2d
    xyz_fn = pj(testdir, 'foo_cart_input.xyz')
    io.write_xyz(xyz_fn, 
                 Structure(units=units,coords_frac=coords2d_frac, 
                           cell=cell2d,
                           symbols=symbols),
                 name='foo') 
    arr = np.loadtxt(StringIO(
            common.backtick("sed -nre 's/^H(.*)/\\1/gp' %s" %xyz_fn)))
    arr2 = coords2d_cart
    np.testing.assert_array_almost_equal(arr, arr2)
Example #30
def test_write_mol():
    units = {'forces': Ha / eV}
    nstep = 2
    cell2d = np.random.rand(3, 3)
    cell3d = np.random.rand(nstep, 3, 3)
    # fractional
    coords2d_frac = np.array([[0.5, 0.5, 0.5], [1, 1, 1]])
    # fractional, 2 time steps: (2,2,3) = (nstep, natoms, 3)
    coords3d_frac = np.array([coords2d_frac, coords2d_frac * 0.8])
    # cartesian = coords3d_frac + cell2d (fixed cell). For variable cell cases
    # below, cell3d is used!
    coords3d_cart = crys.coord_trans(coords3d_frac,
                                     old=cell2d,
                                     new=np.identity(3),
                                     axis=-1)
    coords2d_cart = coords3d_cart[0, ...]
    symbols = ['H'] * 2
    forces2d = np.random.random(coords2d_frac.shape)
    forces3d = np.random.random(coords3d_frac.shape)

    # --- AXSF ---------------------------------------------------------------
    # fixed cell, forces=0
    axsf_fn = pj(testdir, 'foo.axsf')
    io.write_axsf(
        axsf_fn,
        Trajectory(units=units,
                   coords_frac=coords3d_frac,
                   cell=cell2d,
                   symbols=symbols),
    )
    arr = np.loadtxt(
        StringIO(
            common.backtick("grep -A3 PRIMVEC %s | grep -vE -e '--|PRIMVEC'" %
                            axsf_fn)))
    np.testing.assert_array_almost_equal(
        arr, np.concatenate((cell2d, cell2d), axis=0))

    arr = np.loadtxt(
        StringIO(common.backtick("sed -nre 's/^H(.*)/\\1/gp' %s" % axsf_fn)))
    arr2 = np.vstack((coords3d_cart[0, ...], coords3d_cart[1, ...]))
    np.testing.assert_array_almost_equal(arr, arr2)

    # fixed cell, forces3d, coords_frac
    axsf_fn = pj(testdir, 'foo3.axsf')
    io.write_axsf(
        axsf_fn,
        Trajectory(units=units,
                   coords_frac=coords3d_frac,
                   cell=cell2d,
                   symbols=symbols,
                   forces=forces3d),
    )
    arr = np.loadtxt(
        StringIO(common.backtick("sed -nre 's/^H(.*)/\\1/gp' %s" % axsf_fn)))
    t0 = np.concatenate((coords3d_cart[0, ...], forces3d[0, ...]), axis=-1)
    t1 = np.concatenate((coords3d_cart[1, ...], forces3d[1, ...]), axis=-1)
    arr2 = np.vstack((t0, t1))
    print(arr)
    print(arr2)
    print("----------------")
    np.testing.assert_array_almost_equal(arr, arr2)

    # variable cell, forces3d, coords_frac
    axsf_fn = pj(testdir, 'foo4.axsf')
    io.write_axsf(
        axsf_fn,
        Trajectory(units=units,
                   coords_frac=coords3d_frac,
                   cell=cell3d,
                   symbols=symbols,
                   forces=forces3d))
    arr = np.loadtxt(
        StringIO(
            common.backtick(
                "grep -A3 PRIMVEC %s | grep -v -e '--' -e 'PRIMVEC'" %
                axsf_fn)))
    arr2 = np.vstack((cell3d[0, ...], cell3d[1, ...]))
    print(arr)
    print(arr2)
    print("----------------")
    np.testing.assert_array_almost_equal(arr, arr2)
    arr = np.loadtxt(
        StringIO(common.backtick("sed -nre 's/^H(.*)/\\1/gp' %s" % axsf_fn)))
    t0 = np.concatenate(
        (np.dot(coords3d_frac[0, ...], cell3d[0, ...]), forces3d[0, ...]),
        axis=-1)
    t1 = np.concatenate(
        (np.dot(coords3d_frac[1, ...], cell3d[1, ...]), forces3d[1, ...]),
        axis=-1)
    arr2 = np.vstack((t0, t1))
    print(arr)
    print(arr2)
    print("----------------")
    np.testing.assert_array_almost_equal(arr, arr2)

    # single struct, coords_cart
    axsf_fn = pj(testdir, 'foo6.axsf')
    io.write_axsf(
        axsf_fn,
        Structure(units=units,
                  coords=coords2d_cart,
                  cell=cell2d,
                  symbols=symbols,
                  forces=forces2d))
    arr = np.loadtxt(
        StringIO(
            common.backtick(
                "grep -A3 PRIMVEC %s | grep -v -e '--' -e 'PRIMVEC'" %
                axsf_fn)))
    arr2 = cell2d
    print(arr)
    print(arr2)
    print("----------------")
    np.testing.assert_array_almost_equal(arr, arr2)
    arr = np.loadtxt(
        StringIO(common.backtick("sed -nre 's/^H(.*)/\\1/gp' %s" % axsf_fn)))
    arr2 = np.concatenate((coords2d_cart, forces2d), axis=1)
    print(arr)
    print(arr2)
    print("----------------")
    np.testing.assert_array_almost_equal(arr, arr2)

    # --- XYZ ----------------------------------------------------------------
    # Use cell, coords, etc from above

    # input: coords_frac
    xyz_fn = pj(testdir, 'foo_frac_input.xyz')
    io.write_xyz(xyz_fn,
                 Trajectory(units=units,
                            coords_frac=coords3d_frac,
                            cell=cell2d,
                            symbols=symbols),
                 name='foo')
    arr = np.loadtxt(
        StringIO(common.backtick("sed -nre 's/^H(.*)/\\1/gp' %s" % xyz_fn)))
    arr2 = np.concatenate([coords3d_cart[0, ...], coords3d_cart[1, ...]],
                          axis=0)
    np.testing.assert_array_almost_equal(arr, arr2)

    # input: coords_cart, cell=None
    xyz_fn = pj(testdir, 'foo_cart_input.xyz')
    io.write_xyz(xyz_fn,
                 Trajectory(units=units, coords=coords3d_cart,
                            symbols=symbols),
                 name='foo')
    arr = np.loadtxt(
        StringIO(common.backtick("sed -nre 's/^H(.*)/\\1/gp' %s" % xyz_fn)))
    arr2 = np.concatenate((coords3d_cart[0, ...], coords3d_cart[1, ...]),
                          axis=0)
    np.testing.assert_array_almost_equal(arr, arr2)

    # input: coords2d_frac, cell=cell2d
    xyz_fn = pj(testdir, 'foo_cart_input.xyz')
    io.write_xyz(xyz_fn,
                 Structure(units=units,
                           coords_frac=coords2d_frac,
                           cell=cell2d,
                           symbols=symbols),
                 name='foo')
    arr = np.loadtxt(
        StringIO(common.backtick("sed -nre 's/^H(.*)/\\1/gp' %s" % xyz_fn)))
    arr2 = coords2d_cart
    np.testing.assert_array_almost_equal(arr, arr2)
Example #31
def test_eos():
    # load reference fitted with ElkEOSFit, data points [Bohr^3, Ha] -> [Ang^3, eV]
    # EV input data [Bohr^3, Ry] -> [Ang^3, eV]
    data = np.loadtxt("files/ev/evdata.txt")
    volume = data[:,0] * Bohr3_to_Ang3
    energy = data[:,1] * (Ry / eV)
    ref_ev = np.loadtxt("files/ev/EVPAI.OUT.gz")
    ref_ev[:,0] *= Bohr3_to_Ang3
    ref_ev[:,1] *= (Ha / eV)
    ref_pv = np.loadtxt("files/ev/PVPAI.OUT.gz")
    ref_pv[:,0] *= Bohr3_to_Ang3
    ref_min = np.loadtxt("files/ev/min.txt")
    ref_min[0] *= Bohr3_to_Ang3 # v0
    ref_min[1] *= (Ry / eV)     # e0
    assert ref_ev.shape[0] == ref_pv.shape[0], ("reference data lengths "
        "inconsistent")
    ref = {}        
    ref['ev'] = ref_ev        
    ref['pv'] = ref_pv
    ref['v0'], ref['e0'], ref['p0'], ref['b0'] = ref_min

    # test new EosFit class, default func=Vinet()
    eos = EosFit(volume=volume,
                 energy=energy)
    assert np.allclose(eos.params['v0'], eos.spl.get_min())
    assert np.allclose(eos.params['v0'], eos.get_min())
    assert np.allclose(eos.params['e0'], eos(eos.params['v0']))
    assert np.allclose(eos.params['b0']*eV_by_Ang3_to_GPa, eos.bulkmod(eos.params['v0']))
    now = {}
    now['v0'] = eos.params['v0']
    now['e0'] = eos.params['e0']
    now['b0'] = eos.params['b0'] * eV_by_Ang3_to_GPa
    now['p0'] = eos.pressure(eos.params['v0'])
    for key,val in now.items():
        msg = "EosFit: key=%s, ref=%e, val=%e" %(key, ref[key], val)
        assert np.allclose(val, ref[key], atol=1e-7), msg

    # Test legacy ElkEOSFit / ExternEOS. 
    # 'exe' must be on your $PATH.
    exe = 'eos.x'
    app = common.backtick("which %s" %exe).strip()
    if app == '':
        tools.skip("cannot find '%s' on PATH, skipping test" %exe)
    else:
        eos_store = {}
        type_arr = type(np.array([1.0,2.0]))
        for bv_method in ['ev', 'pv']:
            print("bv_method: %s" %bv_method)
            # natoms = 1, no normalization
            eos = ElkEOSFit(energy=energy,
                            volume=volume,
                            natoms=1,
                            etype=1,
                            npoints=300,
                            dir=testdir,
                            bv_method=bv_method)
            eos.fit()
            now = {}
            now['ev'] = eos.ev
            now['pv'] = eos.pv
            now.update(eos.get_min())
            
            # compare to reference
            for key, val in ref.items():
                print("ElkEOSFit: testing:", key)
                if type(val) == type_arr:
                    np.testing.assert_array_almost_equal(now[key], ref[key])
                else:
                    np.testing.assert_almost_equal(now[key], ref[key],
                                                   decimal=3)
            eos_store[bv_method] = eos
            
            # internal check: are the splines correct?
            for name in ['ev', 'pv', 'bv']:
                # API
                getter = getattr(eos, 'get_spl_' + name)
                assert getattr(eos, 'spl_' + name) == getter()
                # (N,2) arrays self.{ev,pv,bv}
                data = getattr(eos, name)
                vv = data[:,0]
                yy = data[:,1]
                # self.spl_{ev,pv,bv}
                spl = getattr(eos, 'spl_' + name)
                np.testing.assert_array_almost_equal(yy, spl(vv))

        # Other attrs for which we do not have external ref data. Test only
        # among the two bv_methods 'ev' and 'pv'.
        print("bv")
        np.testing.assert_array_almost_equal(eos_store['ev'].bv, 
                                             eos_store['pv'].bv,
                                             decimal=2)
Example #32
File: pwscf.py, Project: zari277/pwtools
def read_matdyn_modes(filename, natoms=None):
    """Parse modes file produced by QE's matdyn.x.
    
    Parameters
    ----------
    filename : str
        File to parse (usually "matdyn.modes")
    natoms : int
        Number of atoms.
    
    Returns
    -------
    qpoints, freqs, vecs
    qpoints : 2d array (nqpoints, 3)
        All qpoints on the grid.
    freqs : 2d array, (nqpoints, nmodes) where nmodes = 3*natoms
        Each row: 3*natoms phonon frequencies in [cm^-1] at each q-point.
    vecs : 4d complex array (nqpoints, nmodes, natoms, 3)
        Complex eigenvectors of the dynamical matrix for each q-point.
    
    Examples
    --------
    >>> qpoints,freqs,vecs=read_matdyn_modes('matdyn.modes',natoms=27)
    # how many q-points? -> 8
    >>> qpoints.shape
    (8,3)
    # 1st q-point in file, mode #3 (out of 3*27) -> vectors on all 27 atoms
    >>> vecs[0,2,...].shape
    (27,3)
    # 1st q-point in file, mode #3, vector on atom #15
    >>> vecs[0,2,14,:].real
    array([-0.010832,  0.026063, -0.089511])
    >>> vecs[0,2,14,:].imag
    array([ 0.,  0.,  0.])

    Notes
    -----
    The file to be parsed looks like this::

           diagonalizing the dynamical matrix ...
      
       q =       0.0000      0.0000      0.0000
       **************************************************************************
           omega( 1) =     -26.663631 [THz] =    -889.402992 [cm-1]
       ( -0.218314   0.000000    -0.025643   0.000000    -0.116601   0.000000   )
       ( -0.086633   0.000000     0.108966   0.000000    -0.066513   0.000000   )
      [... natoms lines: x_real x_imag y_real y_imag z_real z_imag ... until
       next omega ...]
           omega( 2) =     -16.330246 [THz] =    -544.718372 [cm-1]
       (  0.172149   0.000000     0.008336   0.000000    -0.121991   0.000000   )
       ( -0.061497   0.000000     0.003782   0.000000    -0.018304   0.000000   )
      [... until omega(3*natoms) ...]
       **************************************************************************
           diagonalizing the dynamical matrix ...
      
      [... until next q-point ...]
       q =       0.0000      0.0000     -0.5000
       **************************************************************************
           omega( 1) =     -24.881828 [THz] =    -829.968443 [cm-1]
       ( -0.225020   0.000464    -0.031584   0.000061    -0.130217   0.000202   )
       ( -0.085499   0.000180     0.107383  -0.000238    -0.086854   0.000096   )
      [...]
       **************************************************************************
    """
    assert natoms is not None
    cmd = r"grep 'q.*=' %s | sed -re 's/.*q\s*=(.*)/\1/'" %filename
    qpoints = parse.arr2d_from_txt(common.backtick(cmd))
    nqpoints = qpoints.shape[0]
    nmodes = 3*natoms
    cmd = r"grep '^[ ]*(' %s | sed -re 's/^\s*\((.*)\)/\1/g'" %filename
    # vecs_file_flat: (nqpoints * nmodes * natoms, 6)
    # this line is the bottleneck
    vecs_file_flat = parse.arr2d_from_txt(common.backtick(cmd))
    vecs_flat = np.empty((vecs_file_flat.shape[0], 3), dtype=complex)
    vecs_flat[:,0] = vecs_file_flat[:,0] + 1j*vecs_file_flat[:,1]
    vecs_flat[:,1] = vecs_file_flat[:,2] + 1j*vecs_file_flat[:,3]
    vecs_flat[:,2] = vecs_file_flat[:,4] + 1j*vecs_file_flat[:,5]
    vecs = vecs_flat.flatten().reshape(nqpoints, nmodes, natoms, 3)
    cmd = r"grep omega %s | sed -re \
            's/.*omega.*=.*\[.*=(.*)\s*\[.*/\1/g'" %filename
    freqs = np.fromstring(common.backtick(cmd), sep=' ').reshape((nqpoints, nmodes))
    return qpoints, freqs, vecs
Example #33
def test_absolute_signal():
    # std lib signal
    from pwtools.common import signal
    assert not hasattr(signal, 'fftsample')
    from pwtools import common
    print(common.backtick('ls'))
Example #34
File: test_eos.py, Project: elcorto/pwtools
def test_eos():
    # load reference fitted with ElkEOSFit, data points [Bohr^3, Ha] -> [Ang^3, eV]
    # EV input data [Bohr^3, Ry] -> [Ang^3, eV]
    data = np.loadtxt("files/ev/evdata.txt")
    volume = data[:,0] * Bohr3_to_Ang3
    energy = data[:,1] * (Ry / eV)
    ref_ev = np.loadtxt("files/ev/EVPAI.OUT.gz")
    ref_ev[:,0] *= Bohr3_to_Ang3
    ref_ev[:,1] *= (Ha / eV)
    ref_pv = np.loadtxt("files/ev/PVPAI.OUT.gz")
    ref_pv[:,0] *= Bohr3_to_Ang3
    ref_min = np.loadtxt("files/ev/min.txt")
    ref_min[0] *= Bohr3_to_Ang3 # v0
    ref_min[1] *= (Ry / eV)     # e0
    assert ref_ev.shape[0] == ref_pv.shape[0], ("reference data lengths "
        "inconsistent")
    ref = {}        
    ref['ev'] = ref_ev        
    ref['pv'] = ref_pv
    ref['v0'], ref['e0'], ref['p0'], ref['b0'] = ref_min

    # test new EosFit class, default func=Vinet()
    eos = EosFit(volume=volume,
                 energy=energy)
    assert np.allclose(eos.params['v0'], eos.spl.get_min())
    assert np.allclose(eos.params['v0'], eos.get_min())
    assert np.allclose(eos.params['e0'], eos(eos.params['v0']))
    assert np.allclose(eos.params['b0']*eV_by_Ang3_to_GPa, eos.bulkmod(eos.params['v0']))
    now = {}
    now['v0'] = eos.params['v0']
    now['e0'] = eos.params['e0']
    now['b0'] = eos.params['b0'] * eV_by_Ang3_to_GPa
    now['p0'] = eos.pressure(eos.params['v0'])
    for key,val in now.iteritems():
        msg = "EosFit: key=%s, ref=%e, val=%e" %(key, ref[key], val)
        assert np.allclose(val, ref[key], atol=1e-7), msg

    # Test legacy ElkEOSFit / ExternEOS. 
    # 'exe' must be on your $PATH.
    exe = 'eos.x'
    app = common.backtick("which %s" %exe).strip()
    if app == '':
        tools.skip("cannot find '%s' on PATH, skipping test" %exe)
    else:
        eos_store = {}
        type_arr = type(np.array([1.0,2.0]))
        for bv_method in ['ev', 'pv']:
            print "bv_method: %s" %bv_method
            # natoms = 1, no normalization
            eos = ElkEOSFit(energy=energy,
                            volume=volume,
                            natoms=1,
                            etype=1,
                            npoints=300,
                            dir=testdir,
                            bv_method=bv_method)
            eos.fit()
            now = {}
            now['ev'] = eos.ev
            now['pv'] = eos.pv
            now.update(eos.get_min())
            
            # compare to reference
            for key, val in ref.iteritems():
                print "ElkEOSFit: testing:", key
                if type(val) == type_arr:
                    np.testing.assert_array_almost_equal(now[key], ref[key])
                else:
                    np.testing.assert_almost_equal(now[key], ref[key],
                                                   decimal=3)
            eos_store[bv_method] = eos
            
            # internal check: are the splines correct?
            for name in ['ev', 'pv', 'bv']:
                # API
                getter = getattr(eos, 'get_spl_' + name)
                assert getattr(eos, 'spl_' + name) == getter()
                # (N,2) arrays self.{ev,pv,bv}
                data = getattr(eos, name)
                vv = data[:,0]
                yy = data[:,1]
                # self.spl_{ev,pv,bv}
                spl = getattr(eos, 'spl_' + name)
                np.testing.assert_array_almost_equal(yy, spl(vv))

        # Other attrs for which we do not have external ref data. Test only
        # among the two bv_methods 'ev' and 'pv'.
        print "bv"
        np.testing.assert_array_almost_equal(eos_store['ev'].bv, 
                                             eos_store['pv'].bv,
                                             decimal=2)
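
As a minimal sketch of the EosFit API exercised above (reusing this test's volume/energy arrays and unit constants; EosFit is assumed to be imported from pwtools' eos module as in the test):

eos = EosFit(volume=volume, energy=energy)      # default func=Vinet(), as noted above
v0 = eos.params['v0']                           # equilibrium volume [Ang^3]
b0_GPa = eos.params['b0'] * eV_by_Ang3_to_GPa   # bulk modulus converted to GPa
p0 = eos.pressure(v0)                           # pressure at the E(V) minimum, close to zero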
Example #35
File: pwscf.py Project: elcorto/pwtools
def read_matdyn_modes(filename, natoms=None):
    """Parse modes file produced by QE's matdyn.x.
    
    Parameters
    ----------
    filename : str
        File to parse (usually "matdyn.modes")
    natoms : int
        Number of atoms.
    
    Returns
    -------
    qpoints, freqs, vecs
    qpoints : 2d array (nqpoints, 3)
        All qpoints on the grid.
    freqs : 2d array, (nqpoints, nmodes) where nmodes = 3*natoms
        Each row: 3*natoms phonon frequencies in [cm^-1] at each q-point.
    vecs : 4d complex array (nqpoints, nmodes, natoms, 3)
        Complex eigenvectors of the dynamical matrix for each q-point.
    
    Examples
    --------
    >>> qpoints,freqs,vecs=read_matdyn_modes('matdyn.modes',natoms=27)
    # how many q-points? -> 8
    >>> qpoints.shape
    (8,3)
    # 1st q-point in file, mode #3 (out of 3*27) -> vectors on all 27 atoms
    >>> vecs[0,2,...].shape
    (27,3)
    # 1st q-point in file, mode #3, vector on atom #15
    >>> vecs[0,2,14,:].real
    array([-0.010832,  0.026063, -0.089511])
    >>> vecs[0,2,14,:].imag
    array([ 0.,  0.,  0.])

    Notes
    -----
    The file to be parsed looks like this::

           diagonalizing the dynamical matrix ...
      
       q =       0.0000      0.0000      0.0000
       **************************************************************************
           omega( 1) =     -26.663631 [THz] =    -889.402992 [cm-1]
       ( -0.218314   0.000000    -0.025643   0.000000    -0.116601   0.000000   )
       ( -0.086633   0.000000     0.108966   0.000000    -0.066513   0.000000   )
      [... natoms lines: x_real x_imag y_real y_imag z_real z_imag ... until
       next omega ...]
           omega( 2) =     -16.330246 [THz] =    -544.718372 [cm-1]
       (  0.172149   0.000000     0.008336   0.000000    -0.121991   0.000000   )
       ( -0.061497   0.000000     0.003782   0.000000    -0.018304   0.000000   )
      [... until omega(3*natoms) ...]
       **************************************************************************
           diagonalizing the dynamical matrix ...
      
      [... until next q-point ...]
       q =       0.0000      0.0000     -0.5000
       **************************************************************************
           omega( 1) =     -24.881828 [THz] =    -829.968443 [cm-1]
       ( -0.225020   0.000464    -0.031584   0.000061    -0.130217   0.000202   )
       ( -0.085499   0.000180     0.107383  -0.000238    -0.086854   0.000096   )
      [...]
       **************************************************************************
    """
    assert natoms is not None
    cmd = r"grep 'q.*=' %s | sed -re 's/.*q\s*=(.*)/\1/'" %filename
    qpoints = parse.arr2d_from_txt(common.backtick(cmd))
    nqpoints = qpoints.shape[0]
    nmodes = 3*natoms
    cmd = r"grep '^[ ]*(' %s | sed -re 's/^\s*\((.*)\)/\1/g'" %filename
    # vecs_file_flat: (nqpoints * nmodes * natoms, 6)
    # this line is the bottleneck
    vecs_file_flat = parse.arr2d_from_txt(common.backtick(cmd))
    vecs_flat = np.empty((vecs_file_flat.shape[0], 3), dtype=complex)
    vecs_flat[:,0] = vecs_file_flat[:,0] + 1j*vecs_file_flat[:,1]
    vecs_flat[:,1] = vecs_file_flat[:,2] + 1j*vecs_file_flat[:,3]
    vecs_flat[:,2] = vecs_file_flat[:,4] + 1j*vecs_file_flat[:,5]
    vecs = vecs_flat.flatten().reshape(nqpoints, nmodes, natoms, 3)
    cmd = r"grep omega %s | sed -re \
            's/.*omega.*=.*\[.*=(.*)\s*\[.*/\1/g'" %filename
    freqs = np.fromstring(common.backtick(cmd), sep=' ').reshape((nqpoints, nmodes))
    return qpoints, freqs, vecs
Example #36
File: test_sql.py Project: elcorto/pwtools
def test_sql():
    # Check for sqlite3 command line tool. In Python 3.3, we can use
    # shutil.which().
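    # (Sketch of that alternative, not used here: import shutil;
    #  have_sqlite3 = shutil.which('sqlite3') is not None.)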
    have_sqlite3 = False
    for pp in sys.path:
        exe = pj(pp, 'sqlite3')
        if os.path.isfile(exe):
            print "found:", exe
            have_sqlite3 = True
            break
        
    # --- SQLiteDB ----------------------------------------------------
    dbfn = pj(testdir, 'test.db')
    if os.path.exists(dbfn):
        os.remove(dbfn)

    header = [('idx', 'INTEGER'), ('foo', 'REAL'), ('bar', 'TEXT')]
    db = SQLiteDB(dbfn, table='calc')
    db.execute("CREATE TABLE calc (%s)" %','.join("%s %s" %(x[0], x[1]) \
                                                  for x in header)) 

    vals = [[0, 1.1, 'a'],
            [1, 2.2, 'b'],
            [2, 3.3, 'c']]
    for lst in vals:
        db.execute("INSERT INTO calc (idx, foo, bar) VALUES (?,?,?)", tuple(lst))
    db.commit()
    
    # get_max_rowid
    assert db.get_max_rowid() == 3

    # has_table
    assert db.has_table('calc')
    assert not db.has_table('foo')
    
    # has_column
    assert db.has_column('idx')
    assert not db.has_column('grrr')

    # get_single
    assert float(db.get_single("select foo from calc where idx==0")) == 1.1

    assert header == db.get_header()
    
    if have_sqlite3:
        # call sqlite3, the cmd line interface
        assert common.backtick("sqlite3 %s 'select * from calc'" %dbfn) \
            == '0|1.1|a\n1|2.2|b\n2|3.3|c\n'

    # ret = 
    # [(0, 1.1000000000000001, u'a'),
    # (1, 2.2000000000000002, u'b'),
    # (2, 3.2999999999999998, u'c')]
    ret = db.execute("select * from calc").fetchall()
    for idx, lst in enumerate(vals):
        assert list(ret[idx]) == lst

    # generator object, yields
    # tup = (0, 1.1000000000000001, u'a')
    # tup = (1, 2.2000000000000002, u'b')
    # tup = (2, 3.2999999999999998, u'c')
    itr = db.execute("select * from calc")
    for idx, tup in enumerate(itr):
        assert list(tup) == vals[idx]

    # [(0, 1.1000000000000001, u'a')]
    assert db.execute("select * from calc where idx==0").fetchall() == \
        [tuple(vals[0])]
    # (0, 1.1000000000000001, u'a')
    assert db.execute("select * from calc where idx==0").fetchone() == \
        tuple(vals[0])

    assert db.execute("select bar from calc where idx==0").fetchone()[0] == \
        'a'
    
    # get_list1d(), get_array1d(), get_array()
    assert db.get_list1d("select idx from calc") == [0,1,2]
    np.testing.assert_array_equal(db.get_array1d("select idx from calc"),
                                  np.array([0,1,2]))
    np.testing.assert_array_equal(db.get_array("select idx from calc"), 
                                  np.array([0,1,2])[:,None])
    np.testing.assert_array_equal(db.get_array("select idx,foo from calc"), 
                                  np.array(vals, dtype='S3')[:,:2].astype(float))

    # add_column(), fill with values
    db.add_column('baz', 'TEXT')
    add_header = [('baz', 'TEXT')]
    header += add_header
    assert db.get_header() == header
    db.execute("UPDATE %s SET baz='xx' where idx==0" %db.table)
    db.execute("UPDATE %s SET baz='yy' where idx==1" %db.table)
    db.execute("UPDATE %s SET baz=? where idx==2" %db.table, ('zz',))
    db.commit()
    if have_sqlite3:
        print common.backtick("sqlite3 %s 'select * from calc'" %dbfn)
    print db.execute("select baz from calc").fetchall()
    assert db.execute("select baz from calc").fetchall() == \
        [(u'xx',), (u'yy',), (u'zz',)]
    
    # add even more cols with add_columns()
    add_header = [('bob', 'TEXT'), ('alice', 'BLOB')]
    header += add_header
    db.add_columns(add_header)
    assert db.get_header() == header
    
    # create_table()
    dbfn2 = pj(testdir, 'test2.db')
    header2 = [('a', 'REAL'), ('b', 'TEXT')]
    db2 = SQLiteDB(dbfn2, table='foo')
    db2.create_table(header2)
    assert db2.get_header() == header2
    
    # get_dict()
    dct = db.get_dict("select foo,bar from calc")
    cols = [x[0] for x in db.get_header()]
    for key in ['foo', 'bar']:
        assert key in cols
    foo = db.get_list1d("select foo from calc")
    bar = db.get_list1d("select bar from calc")
    assert foo == dct['foo']
    assert bar == dct['bar']
Example #37
File: 10input.py Project: elcorto/pwtools

# step 6
# ------
#
# Now, we extend the study. This time we write input for both machines. Since
# for one machine, input is already there, a backup will be made
# (calc_host0.0). Observe the changes to calc.db: revision=1, column "hostname"
# mentions both machines. calc_*/run.sh refers only to the new calculations. A
# file "excl_push" is written, which lists all old calculation indices. Can be
# used with ``rsync --exclude-from=excl_push``.
#
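# A hedged illustration of that rsync call (source and target paths are
# hypothetical placeholders):
#
#   rsync -av --exclude-from=excl_push <study_dir>/ me@host0:<remote_dir>/
#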
params_lst = []
for ecutwfc in np.array([70, 80.0]):
    for pseudo in ["Si.paw"]:
        params_lst.append([sql.SQLEntry(key="ecutwfc", sqlval=ecutwfc), sql.SQLEntry(key="pseudo", sqlval=pseudo)])

calc = batch.ParameterStudy(machines=[host0, host1], templates=templates, params_lst=params_lst, study_name=study_name)
calc.write_input(sleep=0, backup=True, mode="a")


# Load written sqlite DB and print table.
print common.backtick(
    "sqlite3 -column -header calc.db \
                       'select * from calc'"
)

# Some example db query using Python.
db = sql.SQLiteDB("calc.db", table="calc")
print db.get_dict("select idx,ecutwfc,pseudo from calc where ecutwfc <= 60")
Example #38
File: test_sql.py Project: zari277/pwtools
def test_sql():
    # Check for sqlite3 command line tool. In Python 3.3, we can use
    # shutil.which().
    have_sqlite3 = False
    for pp in sys.path:
        exe = pj(pp, 'sqlite3')
        if os.path.isfile(exe):
            print("found:", exe)
            have_sqlite3 = True
            break

    # --- SQLiteDB ----------------------------------------------------
    dbfn = pj(testdir, 'test.db')
    if os.path.exists(dbfn):
        os.remove(dbfn)

    header = [('idx', 'INTEGER'), ('foo', 'REAL'), ('bar', 'TEXT')]
    db = SQLiteDB(dbfn, table='calc')
    db.execute("CREATE TABLE calc (%s)" %','.join("%s %s" %(x[0], x[1]) \
                                                  for x in header))

    vals = [[0, 1.1, 'a'], [1, 2.2, 'b'], [2, 3.3, 'c']]
    for lst in vals:
        db.execute("INSERT INTO calc (idx, foo, bar) VALUES (?,?,?)",
                   tuple(lst))
    db.commit()

    # get_max_rowid
    assert db.get_max_rowid() == 3

    # has_table
    assert db.has_table('calc')
    assert not db.has_table('foo')

    # has_column
    assert db.has_column('idx')
    assert not db.has_column('grrr')

    # get_single
    assert float(db.get_single("select foo from calc where idx==0")) == 1.1

    assert header == db.get_header()

    if have_sqlite3:
        # call sqlite3, the cmd line interface
        assert common.backtick("sqlite3 %s 'select * from calc'" %dbfn) \
            == '0|1.1|a\n1|2.2|b\n2|3.3|c\n'

    # ret =
    # [(0, 1.1000000000000001, u'a'),
    # (1, 2.2000000000000002, u'b'),
    # (2, 3.2999999999999998, u'c')]
    ret = db.execute("select * from calc").fetchall()
    for idx, lst in enumerate(vals):
        assert list(ret[idx]) == lst

    # generator object, yields
    # tup = (0, 1.1000000000000001, u'a')
    # tup = (1, 2.2000000000000002, u'b')
    # tup = (2, 3.2999999999999998, u'c')
    itr = db.execute("select * from calc")
    for idx, tup in enumerate(itr):
        assert list(tup) == vals[idx]

    # [(0, 1.1000000000000001, u'a')]
    assert db.execute("select * from calc where idx==0").fetchall() == \
        [tuple(vals[0])]
    # (0, 1.1000000000000001, u'a')
    assert db.execute("select * from calc where idx==0").fetchone() == \
        tuple(vals[0])

    assert db.execute("select bar from calc where idx==0").fetchone()[0] == \
        'a'

    # get_list1d(), get_array1d(), get_array()
    assert db.get_list1d("select idx from calc") == [0, 1, 2]
    np.testing.assert_array_equal(db.get_array1d("select idx from calc"),
                                  np.array([0, 1, 2]))
    np.testing.assert_array_equal(db.get_array("select idx from calc"),
                                  np.array([0, 1, 2])[:, None])
    np.testing.assert_array_equal(
        db.get_array("select idx,foo from calc"),
        np.array(vals, dtype='S3')[:, :2].astype(float))

    # add_column(), fill with values
    db.add_column('baz', 'TEXT')
    add_header = [('baz', 'TEXT')]
    header += add_header
    assert db.get_header() == header
    db.execute("UPDATE %s SET baz='xx' where idx==0" % db.table)
    db.execute("UPDATE %s SET baz='yy' where idx==1" % db.table)
    db.execute("UPDATE %s SET baz=? where idx==2" % db.table, ('zz', ))
    db.commit()
    if have_sqlite3:
        print(common.backtick("sqlite3 %s 'select * from calc'" % dbfn))
    print(db.execute("select baz from calc").fetchall())
    assert db.execute("select baz from calc").fetchall() == \
        [('xx',), ('yy',), ('zz',)]

    # add even more cols with add_columns()
    add_header = [('bob', 'TEXT'), ('alice', 'BLOB')]
    header += add_header
    db.add_columns(add_header)
    assert db.get_header() == header

    # create_table()
    dbfn2 = pj(testdir, 'test2.db')
    header2 = [('a', 'REAL'), ('b', 'TEXT')]
    db2 = SQLiteDB(dbfn2, table='foo')
    db2.create_table(header2)
    assert db2.get_header() == header2

    # get_dict()
    dct = db.get_dict("select foo,bar from calc")
    cols = [x[0] for x in db.get_header()]
    for key in ['foo', 'bar']:
        assert key in cols
    foo = db.get_list1d("select foo from calc")
    bar = db.get_list1d("select bar from calc")
    assert foo == dct['foo']
    assert bar == dct['bar']
Example #39
def test_cif2sgroup():
    print common.backtick("../bin/cif2sgroup.py files/cif_struct.cif")
Example #40
# step 6
# ------
#
# Now, we extend the study. This time we write input for both machines. Since
# for one machine, input is already there, a backup will be made
# (calc_host0.0). Observe the changes to calc.db: revision=1, column "hostname"
# mentions both machines. calc_*/run.sh refers only to the new calculations. A
# file "excl_push" is written, which lists all old calculation indices. Can be
# used with ``rsync --exclude-from=excl_push``.
#
params_lst = []
for ecutwfc in np.array([70, 80.0]):
    for pseudo in ['Si.paw']:
        params_lst.append([
            sql.SQLEntry(key='ecutwfc', sqlval=ecutwfc),
            sql.SQLEntry(key='pseudo', sqlval=pseudo)
        ])

calc = batch.ParameterStudy(
    machines=[host0, host1],
    templates=templates,
    params_lst=params_lst,
    study_name=study_name,
)
calc.write_input(sleep=0, backup=True, mode='a')

# Load written sqlite DB and print table.
print(
    common.backtick("sqlite3 -column -header calc.db \
                       'select * from calc'"))

# Some example db query using Python.
db = sql.SQLiteDB('calc.db', table='calc')
print(db.get_dict("select idx,ecutwfc,pseudo from calc where ecutwfc <= 60"))
Example #41
def test_cif2any():
    print common.backtick("../bin/cif2any.py files/cif_struct.cif")