def test_scf_cell():
    filename = 'files/pw.vc_relax_coords_fixed.out'
    common.system('gunzip %s.gz' %filename)
    
    pp = parse.PwSCFOutputFile(filename, use_alat=False)
    cell_2d_red = pp.get_cell()
    assert np.allclose(cell_2d_red, cell_2d_red_ref, atol=1e-15, rtol=0)
    
    pp = parse.PwSCFOutputFile(filename, use_alat=True)
    assert np.allclose(pp.get_cell(), 
                       cell_2d_red*pp.get_alat(), 
                       atol=1e-15,
                       rtol=0)
    
    st = io.read_pw_scf(filename)
    tr = io.read_pw_md(filename)

    # tr is from a vc-relax w/ fixed fractional coords; check that they don't change
    assert np.allclose(np.zeros((tr.nstep,tr.natoms,3)), 
                       tr.coords_frac - tr.coords_frac[0,...].copy(),
                       rtol=0, atol=1e-15)

    # check if scf parser gets the same coords_frac as the trajectory parser
    # Note: this and the next test have the same max error of
    # 4.33868466709e-08 (b/c of limited accuracy in printed numbers in
    # pwscf output)
    assert np.allclose(st.coords_frac,tr.coords_frac[0,...], atol=1e-7, rtol=0)
    
    # same test, plus check that concatenate() works
    trcat = crys.concatenate((st,tr))
    assert np.allclose(np.zeros((trcat.nstep,trcat.natoms,3)), 
                       trcat.coords_frac - trcat.coords_frac[0,...].copy(),
                       rtol=0, atol=1e-7)
Example #2
    def __call__(self, obj, logfile=None, structfile=None, disp=False,
                 keepfiles=False, tmpdir='/tmp', wait=True, bg=False,
                 options=''):
        """
        Call viewer. 
        
        The executed shell command is::
            
            <cmd> <options> <structfile> > <logfile>
        
        Parameters
        ----------
        obj : Structure or Trajectory
        logfile : str, optional
            Filename of a logfile for the viewer's text output.
        structfile : str, optional
            Filename of a file to write the structure to.
        disp : bool
            Display text output (i.e. `logfile`'s content).
        keepfiles : bool
            Keep `structfile` and `logfile` on disk.
        tmpdir : str, optional
            Directory where temp files are written to.
        wait : bool, optional
            Passed to common.system(): wait (or not) for the command to exit.
        bg : bool
            Background mode. If True then this is an alias for `wait=False` +
            `keepfiles=True`. The latter is needed b/c with just `wait=False`,
            temp files will be deleted right after the shell call and the
            viewer program may complain.
        """        
        if bg:
            wait = False
            keepfiles = True
        if self.assert_cmd is not None:
            self.assert_cmd(obj)    
        self._set_dummy_symbols(obj)
        if structfile is None:
            fd1,structfile = mkstemp(dir=tmpdir,
                                     prefix='pwtools_view_struct_',
                                     suffix=self.suffix)

        if logfile is None:
            fd2,logfile = mkstemp(dir=tmpdir, 
                                  prefix='pwtools_view_log_')
        self.writer(structfile, obj)
        if disp:
            cmd_str = "%s %s %s 2>&1 | tee %s" %(self.cmd, options, structfile, logfile)
        else:       
            cmd_str = "%s %s %s > %s 2>&1" %(self.cmd, options, structfile, logfile)
        common.system(cmd_str, wait=wait)
        if not keepfiles:
            os.unlink(structfile)
            os.unlink(logfile)
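
# --- Added usage sketch (not from the original source): how such a viewer
# callable is typically invoked. `visualize.view_xcrysden` and the example
# Structure are assumptions for illustration only.
import numpy as np
from pwtools import crys, visualize  # assumed module providing this class

st = crys.Structure(coords_frac=np.array([[0.0]*3, [0.5]*3]),
                    cryst_const=np.array([2.85]*3 + [60]*3),
                    symbols=['Al', 'N'])
# bg=True is shorthand for wait=False + keepfiles=True: the shell call returns
# immediately and the temp struct/log files survive for the running viewer.
visualize.view_xcrysden(st, bg=True)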
Example #3
def test_mix_output():
    # Mixing 'run' and 'minimize' commands (and/or using either command
    # multiple times) causes massive jibber-jabber text output in log.lammps,
    # which we filter. Check that we get the "thermo_style custom" data between
    # "Step..." and "Loop..." from each command.
    #
    # In this test, we have 3 commands (minimize, run (short MD), minimize),
    # which are all set to perform 10 steps, so we have 30 in total. Due to
    # redundant printing by lammps, the result arrays are a bit longer.
    tgz = 'files/lammps/mix_output.tgz'
    tgz_path = os.path.dirname(tgz)
    unpack_path = tgz.replace('.tgz', '')
    common.system("tar -C {0} -xzf {1}".format(tgz_path, tgz))
    tr = io.read_lammps_md_txt("{0}/log.lammps".format(unpack_path))
    assert tr.nstep == 31
    assert tr.coords.shape == (31, 4, 3)
    assert tr.stress.shape == (33, 3, 3)
    assert tr.temperature.shape == (33, )
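
# --- Added illustration (not pwtools' actual parser): the filtering idea the
# comment above describes, i.e. grab the thermo table printed between a
# "Step ..." header and the "Loop time ..." footer of each run/minimize
# command in log.lammps. Helper name and regex are assumptions.
import re

def iter_thermo_blocks(logtext):
    """Yield the raw thermo lines of each run/minimize command."""
    pat = re.compile(r"^Step\b[^\n]*\n(.*?)^Loop time\b", re.M | re.S)
    for m in pat.finditer(logtext):
        yield m.group(1).strip().splitlines()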
Example #4
def test_mix_output():
    # Mixing 'run' and 'minimize' commands (and/or using either command
    # multiple times) causes massive jibber-jabber text output in log.lammps,
    # which we filter. Check that we get the "thermo_style custom" data between
    # "Step..." and "Loop..." from each command. 
    #
    # In this test, we have 3 commands (minimize, run (short MD), minimize),
    # which are all set to perform 10 steps, so we have 30 in total. Due to
    # redundant printing by lammps, the result arrays are a bit longer.
    tgz = 'files/lammps/mix_output.tgz'
    tgz_path = os.path.dirname(tgz)
    unpack_path = tgz.replace('.tgz','')
    common.system("tar -C {0} -xzf {1}".format(tgz_path,tgz))
    tr = io.read_lammps_md_txt("{0}/log.lammps".format(unpack_path))
    assert tr.nstep == 31
    assert tr.coords.shape == (31,4,3)
    assert tr.stress.shape == (33,3,3)
    assert tr.temperature.shape == (33,)
Example #5
def test_pw_vc_relax_out():
    filename = 'files/pw.vc_relax_cell_unit.out'
    common.system('gunzip %s.gz' % filename)
    pp = PwMDOutputFile(filename=filename)
    pp.parse()
    common.system('gzip %s' % filename)
    none_attrs = [
        'coords',
        'ekin',
        'temperature',
        'timestep',
    ]
    assert_attrs_not_none(pp, none_attrs=none_attrs)
    traj = pp.get_traj()
    none_attrs = [\
        'ekin',
        'temperature',
        'timestep',
        'velocity',
        'time',
        ]
    assert_attrs_not_none(traj, none_attrs=none_attrs)
    assert pp.cell_unit == 'alat'
    assert pp.cell.shape == (6, 3, 3)
    for idx in range(1, pp.cell.shape[0]):
        assert crys.rms(pp.cell[idx, ...] - pp.cell[0, ...]) > 0.0

    # Test _get_block_header_unit, which is used in get_cell_unit().
    dct = \
        {'FOO': None,
         'FOO alat': 'alat',
         'FOO (alat)': 'alat',
         'FOO {alat}': 'alat',
         'FOO (alat=1.23)': 'alat',
         'FOO (alat=  1.23)': 'alat',
         }

    for txt, val in dct.items():
        fn = pj(testdir, 'test_block_header_unit.txt')
        common.file_write(fn, txt)
        pp.filename = fn
        assert pp._get_block_header_unit('FOO') == val
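
# --- Added sketch (illustration only, not pwtools' implementation): the kind
# of pattern _get_block_header_unit() has to recognize for the cases in `dct`
# above -- an optional unit token after the block name, possibly wrapped in
# () or {} and possibly followed by "=value".
import re

def block_header_unit(header, name='FOO'):
    m = re.match(r'%s\s*[({]?\s*([a-zA-Z]+)' % re.escape(name), header)
    return m.group(1) if m else None

assert block_header_unit('FOO') is None
assert block_header_unit('FOO {alat}') == 'alat'
assert block_header_unit('FOO (alat=  1.23)') == 'alat'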
Example #6
def test_pw_vc_relax_out():
    filename = 'files/pw.vc_relax_cell_unit.out'
    common.system('gunzip %s.gz' %filename)
    pp = PwMDOutputFile(filename=filename)
    pp.parse()
    common.system('gzip %s' %filename)
    none_attrs = ['coords', 
                  'ekin', 
                  'temperature',
                  'timestep',
                  ]
    assert_attrs_not_none(pp, none_attrs=none_attrs)
    traj = pp.get_traj()
    none_attrs = [\
        'ekin', 
        'temperature',
        'timestep',
        'velocity',
        'time',
        ]
    assert_attrs_not_none(traj, none_attrs=none_attrs)
    assert pp.cell_unit == 'alat'
    assert pp.cell.shape == (6,3,3)
    for idx in range(1, pp.cell.shape[0]):
        assert crys.rms(pp.cell[idx,...] - pp.cell[0,...]) > 0.0
    
    # Test _get_block_header_unit, which is used in get_cell_unit().
    dct = \
        {'FOO': None,
         'FOO alat': 'alat',
         'FOO (alat)': 'alat',
         'FOO {alat}': 'alat',
         'FOO (alat=1.23)': 'alat',
         'FOO (alat=  1.23)': 'alat',
         }

    for txt, val in dct.items():
        fn = pj(testdir, 'test_block_header_unit.txt')
        common.file_write(fn, txt)
        pp.filename = fn
        assert pp._get_block_header_unit('FOO') == val
Example #7
def test_pw_vc_relax_out():
    filename = 'files/pw.vc_relax.out'
    common.system('gunzip %s.gz' %filename)
    pp = PwMDOutputFile(filename=filename)
    pp.parse()
    common.system('gzip %s' %filename)
    none_attrs = ['coords', 
                  'ekin', 
                  'temperature',
                  'timestep',
                  ]
    assert_attrs_not_none(pp, none_attrs=none_attrs)
    traj = pp.get_traj()
    none_attrs = [\
        'ekin', 
        'temperature',
        'timestep',
        'velocity',
        'time',
        ]
    assert_attrs_not_none(traj, none_attrs=none_attrs)   
Example #8
def run(tgz, skip=[], atol_map={}):
    tgz_path = os.path.dirname(tgz)
    unpack_path = tgz.replace('.tgz','')
    common.system("tar -C {0} -xzf {1}".format(tgz_path,tgz))
    tr1 = io.read_lammps_md_txt("{0}/log.lammps".format(unpack_path))
    tr2 = io.read_lammps_md_dcd("{0}/log.lammps".format(unpack_path))
    for name in tr1.attr_lst:
        if name in skip:
            continue
        elif name in atol_map:
            set_atol(atol_map[name])
        else:
            set_atol()
        x1 = getattr(tr1, name)
        x2 = getattr(tr2, name)
        print(name)
        tools.assert_all_types_almost_equal(x1, x2) 
    # stress
    assert (tr1.stress[:,0,1] == tr1.stress[:,1,0]).all()
    assert (tr1.stress[:,0,2] == tr1.stress[:,2,0]).all()
    assert (tr1.stress[:,1,2] == tr1.stress[:,2,1]).all()
Example #9
def run(tgz, skip=[], atol_map={}):
    tgz_path = os.path.dirname(tgz)
    unpack_path = tgz.replace('.tgz', '')
    common.system("tar -C {0} -xzf {1}".format(tgz_path, tgz))
    tr1 = io.read_lammps_md_txt("{0}/log.lammps".format(unpack_path))
    tr2 = io.read_lammps_md_dcd("{0}/log.lammps".format(unpack_path))
    for name in tr1.attr_lst:
        if name in skip:
            continue
        elif name in atol_map:
            set_atol(atol_map[name])
        else:
            set_atol()
        x1 = getattr(tr1, name)
        x2 = getattr(tr2, name)
        print(name)
        tools.assert_all_types_almost_equal(x1, x2)
    # stress
    assert (tr1.stress[:, 0, 1] == tr1.stress[:, 1, 0]).all()
    assert (tr1.stress[:, 0, 2] == tr1.stress[:, 2, 0]).all()
    assert (tr1.stress[:, 1, 2] == tr1.stress[:, 2, 1]).all()
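
# --- Added usage sketch for run() above; the archive name, skipped attr and
# tolerance are made-up placeholders, not values from the real test suite.
run('files/lammps/some_md_run.tgz',
    skip=['timestep'],           # attrs present in only one of the parsers
    atol_map={'coords': 1e-5})   # per-attribute absolute tolerance override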
Example #10
def test_scf_cell():
    filename = 'files/pw.vc_relax_coords_fixed.out'
    common.system('gunzip %s.gz' % filename)

    pp = parse.PwSCFOutputFile(filename, use_alat=False)
    cell_2d_red = pp.get_cell()
    assert np.allclose(cell_2d_red, cell_2d_red_ref, atol=1e-15, rtol=0)

    pp = parse.PwSCFOutputFile(filename, use_alat=True)
    assert np.allclose(pp.get_cell(),
                       cell_2d_red * pp.get_alat(),
                       atol=1e-15,
                       rtol=0)

    st = io.read_pw_scf(filename)
    tr = io.read_pw_md(filename)

    # tr is from a vc-relax w/ fixed fractional coords; check that they don't change
    assert np.allclose(np.zeros((tr.nstep, tr.natoms, 3)),
                       tr.coords_frac - tr.coords_frac[0, ...].copy(),
                       rtol=0,
                       atol=1e-15)

    # check if scf parser gets the same coords_frac as the trajectory parser
    # Note: this and the next test have the same max error of
    # 4.33868466709e-08 (b/c of limited accuracy in printed numbers in
    # pwscf output)
    assert np.allclose(st.coords_frac,
                       tr.coords_frac[0, ...],
                       atol=1e-7,
                       rtol=0)

    # same test, plus check that concatenate() works
    trcat = crys.concatenate((st, tr))
    assert np.allclose(np.zeros((trcat.nstep, trcat.natoms, 3)),
                       trcat.coords_frac - trcat.coords_frac[0, ...].copy(),
                       rtol=0,
                       atol=1e-7)
Example #11
pair_coeff * * ../AlN.tersoff Al N

### IO
dump dump_txt all custom 1 lmp.out.dump id type xu yu zu fx fy fz &
    vx vy vz xsu ysu zsu 
dump_modify dump_txt sort id 
dump dump_dcd all dcd 1 lmp.out.dcd
dump_modify dump_dcd sort id unwrap yes
thermo_style custom step temp vol cella cellb cellc cellalpha cellbeta cellgamma &
                    ke pe etotal &
                    press pxx pyy pzz pxy pxz pyz cpu press
thermo_modify flush yes
thermo 1

fix 1 all box/relax tri 0.0
minimize 1e-8 1e-8 5000 10000 
"""

st = crys.Structure(coords_frac=np.array([[0.0]*3, [.5]*3]),
                    cryst_const=np.array([2.85]*3 + [60]*3),
                    symbols=['Al','N'])

for dr in ['md-nvt', 'md-npt', 'vc-relax']:
    common.system("rm -rfv {dr}; mkdir -v {dr}".format(dr=dr))
io.write_lammps('vc-relax/lmp.struct', st)
io.write_lammps('md-nvt/lmp.struct', crys.scell(st,(2,2,2)))
io.write_lammps('md-npt/lmp.struct', crys.scell(st,(2,2,2)))

for dr, txt in lmp_in.items():
    common.file_write('%s/lmp.in' %dr, txt)
Example #12
matdyn_in_fn = 'matdyn.disp.in'
matdyn_freq_fn = 'matdyn.freq.disp'
mass_str = '\n'.join("amass(%i)=%e" %(ii+1,m) for ii,m in \
                      enumerate(st.mass_unique))
rules = {
    'XXXNKS': ks_path.shape[0],
    'XXXKS': common.str_arr(ks_path),
    'XXXMASS': mass_str,
    'XXXFNFREQ': matdyn_freq_fn,
}
txt = common.template_replace(templ_txt, rules, conv=True, mode='txt')
common.file_write(matdyn_in_fn, txt)
redo_matdyn = input('redo the matdyn.x calculation? Y/N')
if redo_matdyn == "Y":
    common.system(
        "gunzip q2r.fc.gz; /gpfsnyu/home/yf1159/qe-6.5/bin/matdyn.x < %s; gzip q2r.fc"
        % matdyn_in_fn)

# parse matdyn output and plot

# define special points path, used in plot_dis() to plot lines at special
# points and make x-labels
sp = kpath.SpecialPointsPath(ks=sp_points,
                             ks_frac=sp_points_frac,
                             symbols=sp_symbols)

# QE 4.x, 5.x
# ks is the coordinates of kpoints and the freqs is the frequencies for each band
ks, freqs = pwscf.read_matdyn_freq(matdyn_freq_fn)
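
# --- Added note (simplified stand-in, not common.template_replace itself):
# the XXX placeholders in the template text are simply replaced by the string
# representation of the values in `rules`, e.g.
def simple_template_replace(txt, rules):
    for key, val in rules.items():
        txt = txt.replace(key, str(val))
    return txt
# "nks = XXXNKS" with rules={'XXXNKS': 101}  ->  "nks = 101"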
Example #13
    def __call__(self,
                 obj,
                 logfile=None,
                 structfile=None,
                 disp=False,
                 keepfiles=False,
                 tmpdir='/tmp',
                 wait=True,
                 bg=False,
                 options=''):
        """
        Call viewer. 
        
        The executed shell command is::
            
            <cmd> <options> <structfile> > <logfile>
        
        Parameters
        ----------
        obj : Structure or Trajectory
        logfile : str, optional
            Filename of a logfile for the viewer's text output.
        structfile : str, optional
            Filename of a file to write the structure to.
        disp : bool
            Display text output (i.e. `logfile`'s content).
        keepfiles : bool
            Keep `structfile` and `logfile` on disk.
        tmpdir : str, optional
            Directory where temp files are written to.
        wait : bool, optional
            Passed to common.system(): wait (or not) for the command to exit.
        bg : bool
            Background mode. If True then this is an alias for `wait=False` +
            `keepfiles=True`. The latter is needed b/c with just `wait=False`,
            temp files will be deleted right after the shell call and the
            viewer program may complain.
        """
        if bg:
            wait = False
            keepfiles = True
        if self.assert_cmd is not None:
            self.assert_cmd(obj)
        self._set_dummy_symbols(obj)
        if structfile is None:
            fd1, structfile = mkstemp(dir=tmpdir,
                                      prefix='pwtools_view_struct_',
                                      suffix=self.suffix)

        if logfile is None:
            fd2, logfile = mkstemp(dir=tmpdir, prefix='pwtools_view_log_')
        self.writer(structfile, obj)
        if disp:
            cmd_str = "%s %s %s 2>&1 | tee %s" % (self.cmd, options,
                                                  structfile, logfile)
        else:
            cmd_str = "%s %s %s > %s 2>&1" % (self.cmd, options, structfile,
                                              logfile)
        common.system(cmd_str, wait=wait)
        if not keepfiles:
            os.unlink(structfile)
            os.unlink(logfile)
Example #14
File: 10input.py  Project: elcorto/pwtools
templates = [batch.FileTemplate(basename='pw.in')]

# rs-AlN
st = crys.Structure(coords_frac=np.array([[0.0]*3, [0.5]*3]),
                    symbols=['Al','N'],
                    cryst_const=np.array([2.76]*3 + [60]*3))

params_lst = []
for ecutwfc in np.linspace(30,100,8):
    params_lst.append([sql.SQLEntry(key='ecutwfc', sqlval=ecutwfc),
                       sql.SQLEntry(key='ecutrho', sqlval=4.0*ecutwfc),
                       sql.SQLEntry(key='cell', sqlval=common.str_arr(st.cell)),
                       sql.SQLEntry(key='natoms', sqlval=st.natoms),
                       sql.SQLEntry(key='atpos',
                                    sqlval=pwscf.atpos_str(st.symbols,
                                                           st.coords_frac)),
                      ])

calc = batch.ParameterStudy(machines=theo,
                            templates=templates,
                            params_lst=params_lst, 
                            study_name='convergence_test_cutoff',
                            )
calc.write_input(sleep=0, backup=False, mode='w')

if not os.path.exists('calc'):
    os.symlink('calc_theo', 'calc')

common.system("cp -r ../../../test/files/qe_pseudos calc_theo/pseudo; gunzip calc_theo/pseudo/*")
Example #15
def view_lst(lst):
    from pwtools.common import system
    cmd = 'qiv -fm ' + ' '.join(lst)
    system(cmd, wait=True)
Example #16
import os
import numpy as np
from matplotlib import pyplot as plt
from pwtools import parse, crys, constants, common, io

pj = os.path.join

if __name__ == '__main__':
    
    tmpdir = '/tmp/rpdf_vmd_test/'
    if not os.path.exists(tmpdir):
        os.makedirs(tmpdir)
    dct = {}
    
    common.system("gunzip pw.out.gz")
    traj = io.read_pw_md('pw.out')
    common.system("gzip pw.out")
    symbols = np.array(traj.symbols)
    
    # O_Ca
    msk1 = symbols=='O'
    msk2 = symbols=='Ca'
    amask = [msk1, msk2]
    tmask = np.s_[-1]
    dct['O:Ca:-1:-1'] = {'amask': amask, 'tmask': tmask}
    tmask = np.s_[0:]
    dct['O:Ca:0:-1'] = {'amask': amask, 'tmask': tmask}
    
    # Ca_O
    msk1 = symbols=='Ca'
Example #17
File: batch.py  Project: zari277/pwtools
 def write_input(self, mode='a', backup=True, sleep=0, excl=True):
     """
     Create calculation dir(s) for each parameter set and write input files
     based on ``templates``. Write sqlite database storing all relevant
     parameters. Write (bash) shell script to start all calculations (run
      locally or submit a batch job file, depending on ``machine.subcmd``).
 
     Parameters
     ----------
     mode : str, optional
         Fine tune how to write input files (based on ``templates``) to calc
         dirs calc_foo/0/, calc_foo/1/, ... . Note that this doesn't change
         the base dir calc_foo at all, only the subdirs for each calc.
         {'a', 'w'}
         
         | 'a': Append mode (default). If a previous database is found, then
         |     subsequent calculations are numbered based on the last 'idx'.
         |     calc_foo/0 # old
         |     calc_foo/1 # old
         |     calc_foo/2 # new
         |     calc_foo/3 # new
         | 'w': Write mode. The target dirs are purged and overwritten. Also,
         |     the database (self.dbfn) is overwritten. Use this to
         |     iteratively tune your inputs, NOT for working on already
         |     present results!
         |     calc_foo/0 # new
         |     calc_foo/1 # new
     backup : bool, optional
         Before writing anything, do a backup of self.calc_dir if it already
         exists.
     sleep : int, optional
          For the script to start (submit) all jobs: time in seconds for the
          shell sleep(1) command.
     excl : bool
         If in append mode, a file <calc_root>/excl_push with all indices of
         calculations from old revisions is written. Can be used with
         ``rsync --exclude-from=excl_push`` when pushing appended new
         calculations to a cluster.
     """
     assert mode in ['a', 'w'], "Unknown mode: '%s'" % mode
     if os.path.exists(self.dbfn):
         if backup:
             common.backup(self.dbfn)
         if mode == 'w':
             os.remove(self.dbfn)
     have_new_db = not os.path.exists(self.dbfn)
     common.makedirs(self.calc_root)
     # this call creates a file ``self.dbfn`` if it doesn't exist
     sqldb = SQLiteDB(self.dbfn, table=self.db_table)
     # max_idx: counter for calc dir numbering
     revision = 0
     if have_new_db:
         max_idx = -1
     else:
         if mode == 'a':
             if sqldb.has_column('idx'):
                 max_idx = sqldb.execute("select max(idx) from %s" \
                 %self.db_table).fetchone()[0]
             else:
                 raise Exception(
                     "database '%s': table '%s' has no "
                     "column 'idx', don't know how to number calcs" %
                     (self.dbfn, self.db_table))
             if sqldb.has_column('revision'):
                 revision = int(
                     sqldb.get_single("select max(revision) \
                     from %s" % self.db_table)) + 1
         elif mode == 'w':
             max_idx = -1
     sql_records = []
     hostnames = []
     for imach, machine in enumerate(self.machines):
         hostnames.append(machine.hostname)
         calc_dir = pj(self.calc_root, self.calc_dir_prefix + \
                       '_%s' %machine.hostname)
         if os.path.exists(calc_dir):
             if backup:
                 common.backup(calc_dir)
             if mode == 'w':
                 common.system("rm -r %s" % calc_dir, wait=True)
         run_txt = "here=$(pwd)\n"
         for _idx, params in enumerate(self.params_lst):
             params = common.flatten(params)
             idx = max_idx + _idx + 1
             calc_subdir = pj(calc_dir, str(idx))
             extra_dct = \
                 {'revision': revision,
                  'study_name': self.study_name,
                  'idx': idx,
                  'calc_name' : self.study_name + "_run%i" %idx,
                  }
             extra_params = [SQLEntry(key=key, sqlval=val) for key,val in \
                             extra_dct.items()]
             # templates[:] to copy b/c they may be modified in Calculation
             calc = Calculation(
                 machine=machine,
                 templates=self.templates[:],
                 params=params + extra_params,
                 calc_dir=calc_subdir,
             )
             if mode == 'w' and os.path.exists(calc_subdir):
                 shutil.rmtree(calc_subdir)
             calc.write_input()
             run_txt += "cd %i && %s %s && cd $here && sleep %i\n" %(idx,\
                         machine.subcmd, machine.get_jobfile_basename(), sleep)
             if imach == 0:
                 sql_records.append(calc.get_sql_record())
         common.file_write(pj(calc_dir, 'run.sh'), run_txt)
     for record in sql_records:
         record['hostname'] = SQLEntry(sqlval=','.join(hostnames))
     # for incomplete parameters: collect header parts from all records and
     # make a set = unique entries
     raw_header = [(key, entry.sqltype.upper()) for record in sql_records \
         for key, entry in record.items()]
     header = list(set(raw_header))
     if have_new_db:
         sqldb.create_table(header)
     else:
         for record in sql_records:
             for key, entry in record.items():
                 if not sqldb.has_column(key):
                     sqldb.add_column(key, entry.sqltype.upper())
     for record in sql_records:
         cmd = "insert into %s (%s) values (%s)"\
             %(self.db_table,
               ",".join(list(record.keys())),
               ",".join(['?']*len(list(record.keys()))))
         sqldb.execute(cmd,
                       tuple(entry.sqlval for entry in record.values()))
     if excl and revision > 0 and sqldb.has_column('revision'):
          old_idx_lst = [str(x) for x, in
                         sqldb.execute("select idx from calc where revision < ?",
                                       (revision,))]
         common.file_write(pj(self.calc_root, 'excl_push'),
                           '\n'.join(old_idx_lst))
     sqldb.finish()
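
# --- Added usage sketch of the append mode documented above. `theo`,
# `templates` and the params_lst variables are placeholders; the point is that
# a second write_input() against the same calc_root/database appends new calc
# subdirs, bumps 'revision' and (with excl=True) writes <calc_root>/excl_push
# listing the indices of the older revisions.
study = batch.ParameterStudy(machines=theo, templates=templates,
                             params_lst=params_lst_first, study_name='demo')
study.write_input(mode='a')              # revision 0: calc dirs 0..N
study = batch.ParameterStudy(machines=theo, templates=templates,
                             params_lst=params_lst_more, study_name='demo')
study.write_input(mode='a', excl=True)   # revision 1: dirs N+1.., excl_push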
Example #18
def unpack(fns):
    for fn in fns:
        common.system('gunzip %s' % fn)
Example #19
def pack(fns):
    for fn in fns:
        common.system('gzip %s' % fn)
Example #20
import os
import numpy as np
from matplotlib import pyplot as plt
from pwtools import parse, crys, constants, common, io

pj = os.path.join

if __name__ == '__main__':

    tmpdir = '/tmp/rpdf_vmd_test/'
    if not os.path.exists(tmpdir):
        os.makedirs(tmpdir)
    dct = {}

    common.system("gunzip pw.out.gz")
    traj = io.read_pw_md('pw.out')
    common.system("gzip pw.out")
    symbols = np.array(traj.symbols)

    # O_Ca
    msk1 = symbols == 'O'
    msk2 = symbols == 'Ca'
    amask = [msk1, msk2]
    tmask = np.s_[-1]
    dct['O:Ca:-1:-1'] = {'amask': amask, 'tmask': tmask}
    tmask = np.s_[0:]
    dct['O:Ca:0:-1'] = {'amask': amask, 'tmask': tmask}

    # Ca_O
    msk1 = symbols == 'Ca'
Example #21
def test_pw_scf_out():
    
    # ref data for Structure, all lengths in Ang, energy in eV
    natoms = 2
    symbols = ['Si', 'Si']
    cell = np.array([[-2.71536701,  0.        ,  2.71536701],
           [ 0.        ,  2.71536701,  2.71536701],
           [-2.71536701,  2.71536701,  0.        ]])
    forces = np.array([[ 2.57110316,  5.14220632,  7.71330948],
                       [-2.57110316, -5.14220632, -7.71330948]]) # eV / Ang
    nspecies = {'Si': 2}
    mass = np.array([ 28.0855,  28.0855]) # amu
    cryst_const = np.array([  3.84010885,   3.84010885,   3.84010885,  60. ,
            60.        ,  60.        ])
    symbols_unique = ['Si']
    etot = -258.58148870118305 # eV
    typat = [1, 1]
    volume = 40.041985843396688 # Ang**3
    stress = np.array([[ 9.825,   0.  ,   0.  ],
           [  0.  ,  9.825,   0.  ],
           [  0.  ,   0.  ,  9.825]]) # GPa
    coords_frac = np.array([[ 0.  ,  0.  ,  0.  ],
           [ 0.25,  0.25,  0.25]])
    pressure = 9.825 # GPa
    coords = np.array([[ 0.        ,  0.        ,  0.        ],
           [-1.35768351,  1.35768351,  1.35768351]])
    order = {'Si': 1}
    alat = 10.2626 # Bohr


    filename = 'files/pw.scf.out'
    common.system('gunzip %s.gz' %filename)

    # use_alat=False. Provide high-precision alat from outside (e.g.
    # from pw.in instead of parsing and using low-precision value from pw.out).
    # Here we use the same alat for the tests.
    pp1 = PwSCFOutputFile(filename=filename, 
                         use_alat=False, # alat=1.0
                         units={'length': alat*Bohr/Ang})
    struct1 = pp1.get_struct() # pp1.parse() called here
    assert_attrs_not_none(struct1) 
    assert_attrs_not_none(pp1) 
    assert pp1.scf_converged is True
    assert alat == pp1.get_alat(True)
    assert 1.0 == pp1.get_alat(False)
    
    aaae(cryst_const, struct1.cryst_const)
    aaae(cell, struct1.cell)
    aaae(coords, struct1.coords)
    aaae(coords_frac, struct1.coords_frac)
    aaae(forces, struct1.forces)
    aaae(stress, struct1.stress)
    assert np.allclose(volume, struct1.volume)
    assert np.allclose(etot, struct1.etot)
    assert np.allclose(pressure, struct1.pressure)
    
    # use_alat=True, alat = 10.2626 Bohr
    pp2 = PwSCFOutputFile(filename=filename, use_alat=True)
    struct2 = pp2.get_struct() # pp.parse() called here
    assert_attrs_not_none(struct2) 
    assert_attrs_not_none(pp2) 
    assert np.allclose(alat, pp2.alat)
    assert pp2.scf_converged is True
    assert alat == pp2.get_alat(True)    # Bohr
    assert 1.0 == pp2.get_alat(False)
    
    # Skip coords and cell b/c they are modified by self.alat and
    # pp1.alat = 1.0, pp2.alat = 10.2626 
    attr_lst = common.pop_from_list(pp1.attr_lst, ['coords', 'cell'])
    adae(pp1.__dict__, pp2.__dict__, keys=attr_lst)         

    attr_lst = struct1.attr_lst
    adae(struct1.__dict__, struct2.__dict__, keys=attr_lst)         
    
    pp3 = PwSCFOutputFile(filename=filename)
    assert alat == pp3.get_alat() # self.use_alat=True default
    
    common.system('gzip %s' %filename)
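
# --- Added cross-check (assumption: pwtools' Bohr and Ang constants are SI
# lengths): alat = 10.2626 Bohr is the Si lattice constant, about 5.431 Ang,
# and the `cell` entries above are half of that.
from scipy.constants import angstrom, physical_constants
bohr_in_ang = physical_constants['Bohr radius'][0] / angstrom
assert abs(10.2626 * bohr_in_ang / 2.0 - 2.71536701) < 1e-5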
Example #22
    fourier_in_data = np.zeros((arr.shape[0], 7))
    fourier_in_data[:, 0] = np.arange(arr.shape[0])
    fourier_in_data[:, 4] = arr
    fourier_in_data_fn = pj(fourier_dir, 'fourier_in_data_1d.txt')
    fourier_out_data_fn = pj(fourier_dir, 'fourier_out_data_1d.txt')
    fourier_in_fn = pj(fourier_dir, 'fourier_1d.in')
    fourier_out_fn = pj(fourier_dir, 'fourier_1d.log')
    fourier_in_txt = '%s\n%s\n%e\n%e\n%e\n%i' % (
        fourier_in_data_fn, fourier_out_data_fn, dt / constants.th, 0,
        fmax * fmax_extend_fac / (constants.c0 * 100), 1)
    common.file_write(fourier_in_fn, fourier_in_txt)
    # In order to make picky gfortrans happy, we need to use savetxt(...,
    # fmt="%g") such that the first column is an integer (1 instead of
    # 1.0000e+00).
    np.savetxt(fourier_in_data_fn, fourier_in_data, fmt='%g')
    common.system("%s < %s > %s" %
                  (fourier_exe, fourier_in_fn, fourier_out_fn))
    fourier_out_data = np.loadtxt(fourier_out_data_fn)
    f3 = fourier_out_data[:, 0] * (constants.c0 * 100)  # 1/cm -> Hz
    y3n = num.norm_int(fourier_out_data[:, 1], f3)

f1, y1n = cut_norm(y1, dt)
f2, y2n = cut_norm(y2, dt)

figs = []
axs = []

figs.append(plt.figure())
axs.append(figs[-1].add_subplot(111))
axs[-1].set_title('1d arr')
axs[-1].plot(f1, y1n, label='1d |fft(arr)|^2, direct')
axs[-1].plot(f2, y2n, label='1d |fft(acorr(arr))|, vacf')
Example #23
File: run.py  Project: elcorto/pwtools
elif sys.argv[1] == 'nvt':    
    ens_txt = nvt_txt
else:
    raise Exception("only nvt / npt allowed")

# create structure file
st = crys.Structure(coords_frac=np.array([[0.0]*3, [.5]*3]),
                    cryst_const=np.array([2.85]*3 + [60]*3),
                    symbols=['Al','N'])
io.write_lammps('lmp.struct', crys.scell(st,(3,3,3)))

# write lmp.in for nvt or npt
common.file_write('lmp.in', lmp_in_templ.format(ensemble=ens_txt))

# run lammps
common.system("mpirun -np 2 lammps < lmp.in", wait=True)

# read trajectory
trtxt_orig = io.read_lammps_md_txt('log.lammps')
trdcd = io.read_lammps_md_dcd('log.lammps')

# plotting
plots = mpl.prepare_plots(['coords', 'coords_frac', 'velocity', 
                           'cryst_const', 'cell'])
for name, pl in plots.items():
    trtxt = trtxt_orig.copy()
    print(name)
    xtxt = getattr(trtxt, name)
    setattr(trtxt, name, None)
    xcalc = eval('trtxt.get_%s()' %name)
    if name == 'cell':
Example #24
def run(dr, none_attrs=[]):
    dr = dr[:-1] if dr.endswith('/') else dr
    archive = dr + '.tgz'
    workdir = unpack_compressed(archive)
    common.system('../bin/cut-cpmd.sh %s 20 > %s/cut-cpmd.log' %(workdir,
        workdir))
Example #25
    fourier_in_data_fn = pj(fourier_dir, 'fourier_in_data_1d.txt')
    fourier_out_data_fn = pj(fourier_dir, 'fourier_out_data_1d.txt')
    fourier_in_fn = pj(fourier_dir, 'fourier_1d.in')
    fourier_out_fn = pj(fourier_dir, 'fourier_1d.log')
    fourier_in_txt = '%s\n%s\n%e\n%e\n%e\n%i' %(fourier_in_data_fn,
                                                fourier_out_data_fn,
                                                dt/constants.th,
                                                0,
                                                fmax*fmax_extend_fac/(constants.c0*100),
                                                1)
    common.file_write(fourier_in_fn, fourier_in_txt)
    # In order to make picky gfortrans happy, we need to use savetxt(...,
    # fmt="%g") such that the first column is an integer (1 instead of
    # 1.0000e+00). 
    np.savetxt(fourier_in_data_fn, fourier_in_data, fmt='%g')
    common.system("%s < %s > %s" %(fourier_exe, fourier_in_fn, fourier_out_fn))
    fourier_out_data = np.loadtxt(fourier_out_data_fn)
    f3 = fourier_out_data[:,0]*(constants.c0*100) # 1/cm -> Hz
    y3n = num.norm_int(fourier_out_data[:,1], f3)

f1, y1n = cut_norm(y1, dt)
f2, y2n = cut_norm(y2, dt)

figs = []
axs = []

figs.append(plt.figure())
axs.append(figs[-1].add_subplot(111))
axs[-1].set_title('1d arr')
axs[-1].plot(f1, y1n, label='1d |fft(arr)|^2, direct')
axs[-1].plot(f2, y2n, label='1d |fft(acorr(arr))|, vacf')
Example #26
XXXNKS
XXXKS
"""
matdyn_in_fn = 'matdyn.disp.in'
matdyn_freq_fn = 'matdyn.freq.disp'
mass_str = '\n'.join("amass(%i)=%e" %(ii+1,m) for ii,m in \
                      enumerate(st.mass_unique))
rules = {
    'XXXNKS': ks_path.shape[0],
    'XXXKS': common.str_arr(ks_path),
    'XXXMASS': mass_str,
    'XXXFNFREQ': matdyn_freq_fn,
}
txt = common.template_replace(templ_txt, rules, conv=True, mode='txt')
common.file_write(matdyn_in_fn, txt)
common.system("gunzip q2r.fc.gz; matdyn.x < %s; gzip q2r.fc" % matdyn_in_fn)

# parse matdyn output and plot

# define special points path, used in plot_dis() to plot lines at special
# points and make x-labels
sp = kpath.SpecialPointsPath(ks=sp_points,
                             ks_frac=sp_points_frac,
                             symbols=sp_symbols)

# QE 4.x, 5.x
ks, freqs = pwscf.read_matdyn_freq(matdyn_freq_fn)
fig, ax = kpath.plot_dis(kpath.get_path_norm(ks_path),
                         freqs,
                         sp,
                         marker='',
Example #27
File: test_qha.py  Project: elcorto/pwtools
def pack(fns):
    for fn in fns:
        common.system('gzip %s' %fn)
Example #28
File: 10input.py  Project: elcorto/pwtools
                      scratch='/tmp',
                      filename='calc.templ/job.local',
                      home='/home/schmerler')

templates = [batch.FileTemplate(basename=x) for x in 
             ['lmp.in', 'lmp.struct', 'lmp.struct.symbols']]

# rs-AlN
st = crys.Structure(coords_frac=np.array([[0.0]*3, [0.5]*3]),
                    symbols=['Al','N'],
                    cryst_const=np.array([2.78]*3 + [60]*3))

params_lst = []
for target_press in np.linspace(-20,20,15): # GPa, bar in lammps
    params_lst.append([sql.SQLEntry(key='target_press', sqlval=target_press*1e4),
                       sql.SQLEntry(key='struct', sqlval=lammps.struct_str(st)),
                       sql.SQLEntry(key='symbols', sqlval='\n'.join(st.symbols)),
                      ])

calc = batch.ParameterStudy(machines=local,
                            templates=templates,
                            params_lst=params_lst, 
                            study_name='lammps_ev',
                            )
calc.write_input(sleep=0, backup=False, mode='w')

if not os.path.exists('calc'):
    os.symlink('calc_local', 'calc')

common.system("cp -r potentials calc_local/")
Example #29
File: test_qha.py  Project: elcorto/pwtools
def unpack(fns):
    for fn in fns:
        common.system('gunzip %s' %fn)
Example #30
"""
matdyn_in_fn = 'matdyn.disp.in'
matdyn_freq_fn = 'matdyn.freq.disp'
mass_str = '\n'.join("amass(%i)=%e" %(ii+1,m) for ii,m in \
                      enumerate(st.mass_unique))
rules = {'XXXNKS': ks_path.shape[0],
         'XXXKS': common.str_arr(ks_path),
         'XXXMASS': mass_str,
         'XXXFNFREQ': matdyn_freq_fn,
         }
txt = common.template_replace(templ_txt,
                              rules,
                              conv=True,
                              mode='txt')
common.file_write(matdyn_in_fn, txt)
common.system("gunzip q2r.fc.gz; matdyn.x < %s; gzip q2r.fc" %matdyn_in_fn)

# parse matdyn output and plot

# define special points path, used in plot_dis() to plot lines at special
# points and make x-labels
sp = kpath.SpecialPointsPath(ks=sp_points, ks_frac=sp_points_frac,
                             symbols=sp_symbols)

# QE 4.x, 5.x
ks, freqs = pwscf.read_matdyn_freq(matdyn_freq_fn)
fig,ax = kpath.plot_dis(kpath.get_path_norm(ks_path), freqs, sp, marker='', ls='-', color='k') 

# QE 5.x
##d = np.loadtxt(matdyn_freq_fn + '.gp')
##fig,ax = kpath.plot_dis(d[:,0], d[:,1:], sp, marker='', ls='-', color='k') 
Example #31
elif sys.argv[1] == 'nvt':    
    ens_txt = nvt_txt
else:
    raise Exception("only nvt / npt allowed")

# create structure file
st = crys.Structure(coords_frac=np.array([[0.0]*3, [.5]*3]),
                    cryst_const=np.array([2.85]*3 + [60]*3),
                    symbols=['Al','N'])
io.write_lammps('lmp.struct', crys.scell(st,(3,3,3)))

# write lmp.in for nvt or npt
common.file_write('lmp.in', lmp_in_templ.format(ensemble=ens_txt))

# run lammps
common.system("mpirun -np 2 lammps < lmp.in", wait=True)

# read trajectory
trtxt_orig = io.read_lammps_md_txt('log.lammps')
trdcd = io.read_lammps_md_dcd('log.lammps')

# plotting
plots = mpl.prepare_plots(['coords', 'coords_frac', 'velocity', 
                           'cryst_const', 'cell'])
for name,pl in plots.items():
    trtxt = trtxt_orig.copy()
    print(name)
    xtxt = getattr(trtxt, name)
    setattr(trtxt, name, None)
    xcalc = eval('trtxt.get_%s()' %name)
    if name == 'cell':
Example #32
File: batch.py  Project: elcorto/pwtools
 def write_input(self, mode='a', backup=True, sleep=0, excl=True):
     """
     Create calculation dir(s) for each parameter set and write input files
     based on ``templates``. Write sqlite database storing all relevant
     parameters. Write (bash) shell script to start all calculations (run
      locally or submit a batch job file, depending on ``machine.subcmd``).
 
     Parameters
     ----------
     mode : str, optional
         Fine tune how to write input files (based on ``templates``) to calc
         dirs calc_foo/0/, calc_foo/1/, ... . Note that this doesn't change
         the base dir calc_foo at all, only the subdirs for each calc.
         {'a', 'w'}
         
         | 'a': Append mode (default). If a previous database is found, then
         |     subsequent calculations are numbered based on the last 'idx'.
         |     calc_foo/0 # old
         |     calc_foo/1 # old
         |     calc_foo/2 # new
         |     calc_foo/3 # new
         | 'w': Write mode. The target dirs are purged and overwritten. Also,
         |     the database (self.dbfn) is overwritten. Use this to
         |     iteratively tune your inputs, NOT for working on already
         |     present results!
         |     calc_foo/0 # new
         |     calc_foo/1 # new
     backup : bool, optional
         Before writing anything, do a backup of self.calc_dir if it already
         exists.
     sleep : int, optional
          For the script to start (submit) all jobs: time in seconds for the
          shell sleep(1) command.
     excl : bool
         If in append mode, a file <calc_root>/excl_push with all indices of
         calculations from old revisions is written. Can be used with
         ``rsync --exclude-from=excl_push`` when pushing appended new
         calculations to a cluster.
     """
     assert mode in ['a', 'w'], "Unknown mode: '%s'" %mode
     if os.path.exists(self.dbfn):
         if backup:
             common.backup(self.dbfn)
         if mode == 'w':
             os.remove(self.dbfn)
     have_new_db = not os.path.exists(self.dbfn)
     common.makedirs(self.calc_root)
     # this call creates a file ``self.dbfn`` if it doesn't exist
     sqldb = SQLiteDB(self.dbfn, table=self.db_table)
     # max_idx: counter for calc dir numbering
     revision = 0
     if have_new_db:
         max_idx = -1
     else:
         if mode == 'a':
             if sqldb.has_column('idx'):
                 max_idx = sqldb.execute("select max(idx) from %s" \
                 %self.db_table).fetchone()[0]
             else:
                 raise StandardError("database '%s': table '%s' has no "
                       "column 'idx', don't know how to number calcs"
                       %(self.dbfn, self.db_table))
             if sqldb.has_column('revision'):
                 revision = int(sqldb.get_single("select max(revision) \
                     from %s" %self.db_table)) + 1
         elif mode == 'w':
             max_idx = -1
     sql_records = []
     hostnames = []
     for imach, machine in enumerate(self.machines):
         hostnames.append(machine.hostname)
         calc_dir = pj(self.calc_root, self.calc_dir_prefix + \
                       '_%s' %machine.hostname)
         if os.path.exists(calc_dir):
             if backup:
                 common.backup(calc_dir)
             if mode == 'w':
                 common.system("rm -r %s" %calc_dir, wait=True)
         run_txt = "here=$(pwd)\n"
         for _idx, params in enumerate(self.params_lst):
             params = common.flatten(params)
             idx = max_idx + _idx + 1
             calc_subdir = pj(calc_dir, str(idx))
             extra_dct = \
                 {'revision': revision,
                  'study_name': self.study_name,
                  'idx': idx,
                  'calc_name' : self.study_name + "_run%i" %idx,
                  }
             extra_params = [SQLEntry(key=key, sqlval=val) for key,val in \
                              extra_dct.items()]
             # templates[:] to copy b/c they may be modified in Calculation
             calc = Calculation(machine=machine,
                                templates=self.templates[:], 
                                params=params + extra_params,
                                calc_dir=calc_subdir,
                                )
             if mode == 'w' and os.path.exists(calc_subdir):
                 shutil.rmtree(calc_subdir)
             calc.write_input()                               
             run_txt += "cd %i && %s %s && cd $here && sleep %i\n" %(idx,\
                         machine.subcmd, machine.get_jobfile_basename(), sleep)
             if imach == 0:                            
                 sql_records.append(calc.get_sql_record())
         common.file_write(pj(calc_dir, 'run.sh'), run_txt)
     for record in sql_records:
         record['hostname'] = SQLEntry(sqlval=','.join(hostnames))
     # for incomplete parameters: collect header parts from all records and
     # make a set = unique entries
     raw_header = [(key, entry.sqltype.upper()) for record in sql_records \
          for key, entry in record.items()]
     header = list(set(raw_header))
     if have_new_db:
         sqldb.create_table(header)
     else:
         for record in sql_records:
              for key, entry in record.items():
                 if not sqldb.has_column(key):
                     sqldb.add_column(key, entry.sqltype.upper())
     for record in sql_records:
         cmd = "insert into %s (%s) values (%s)"\
             %(self.db_table,
               ",".join(record.keys()),
               ",".join(['?']*len(record.keys())))
          sqldb.execute(cmd, tuple(entry.sqlval for entry in record.values()))
     if excl and revision > 0 and sqldb.has_column('revision'):
          old_idx_lst = [str(x) for x, in
                         sqldb.execute("select idx from calc where revision < ?",
                                       (revision,))]
         common.file_write(pj(self.calc_root, 'excl_push'),
                           '\n'.join(old_idx_lst))
     sqldb.finish()
Example #33
templates = [
    batch.FileTemplate(basename=x)
    for x in ['lmp.in', 'lmp.struct', 'lmp.struct.symbols']
]

# rs-AlN
st = crys.Structure(coords_frac=np.array([[0.0] * 3, [0.5] * 3]),
                    symbols=['Al', 'N'],
                    cryst_const=np.array([2.78] * 3 + [60] * 3))

params_lst = []
for target_press in np.linspace(-20, 20, 15):  # GPa, bar in lammps
    params_lst.append([
        sql.SQLEntry(key='target_press', sqlval=target_press * 1e4),
        sql.SQLEntry(key='struct', sqlval=lammps.struct_str(st)),
        sql.SQLEntry(key='symbols', sqlval='\n'.join(st.symbols)),
    ])

calc = batch.ParameterStudy(
    machines=local,
    templates=templates,
    params_lst=params_lst,
    study_name='lammps_ev',
)
calc.write_input(sleep=0, backup=False, mode='w')

if not os.path.exists('calc'):
    os.symlink('calc_local', 'calc')

common.system("cp -r potentials calc_local/")
Example #34
def test_pw_more_forces():    
    fac = Ry / eV / Bohr * Ang

    # MD: london=.true.
    
    filename = 'files/pw.md_london.out'
    common.system('gunzip %s.gz' %filename)
    natoms = 141
    nstep = 10
    # traj case
    pp = PwMDOutputFile(filename=filename)
    tr = pp.get_traj()
    assert tr.natoms == natoms
    assert tr.forces.shape == (nstep,natoms,3)
    assert tr.coords.shape == (nstep,natoms,3)
    assert pp._forces_raw.shape == (nstep+1,2*natoms,3)
    assert np.allclose(tr.forces, pp._forces_raw[1:,:natoms,:] * fac)
   
    # scf case, return only 1st step
    pp = PwSCFOutputFile(filename=filename)
    st = pp.get_struct()
    assert st.natoms == natoms
    assert st.forces.shape == (natoms,3)
    assert st.coords.shape == (natoms,3)
    assert pp._forces_raw.shape == (nstep+1,2*natoms,3)
    assert np.allclose(st.forces, pp._forces_raw[0,:natoms,:] * fac)
    common.system('gzip %s' %filename)
    
    # SCF: verbosity='high' + london=.true.

    filename = 'files/pw.scf_verbose_london.out'
    common.system('gunzip %s.gz' %filename)
    natoms = 4
    nstep = 1
    pp = PwSCFOutputFile(filename=filename)
    st = pp.get_struct()
    assert st.natoms == natoms
    assert st.forces.shape == (natoms,3)
    assert st.coords.shape == (natoms,3)
    assert pp._forces_raw.shape == (nstep, 8*natoms,3)
    assert np.allclose(st.forces, pp._forces_raw[0,:natoms,:] * fac)
    common.system('gzip %s' %filename)


    # MD: verbosity='high' + natoms=1

    filename = 'files/pw.md_one_atom.out'
    common.system('gunzip %s.gz' %filename)
    natoms = 1
    nstep = 4
    # traj case
    pp = PwMDOutputFile(filename=filename)
    tr = pp.get_traj()
    assert tr.natoms == natoms
    assert tr.forces.shape == (nstep,natoms,3)
    assert tr.coords.shape == (nstep,natoms,3)
    assert pp._forces_raw.shape == (nstep+1,7*natoms,3)
    assert np.allclose(tr.forces, pp._forces_raw[1:,:natoms,:] * fac)
   
    # scf case, return only 1st step
    pp = PwSCFOutputFile(filename=filename)
    st = pp.get_struct()
    assert st.natoms == natoms
    assert st.forces.shape == (natoms,3)
    assert st.coords.shape == (natoms,3)
    assert pp._forces_raw.shape == (nstep+1,7*natoms,3)
    assert np.allclose(st.forces, pp._forces_raw[0,:natoms,:] * fac)
    common.system('gzip %s' %filename)
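
# --- Added note on `fac` above (assumption: pwtools' Ry, eV, Bohr, Ang are
# plain SI values): pw.x prints forces in Ry/Bohr while the Structure /
# Trajectory objects store eV/Ang, so fac = Ry/eV/Bohr*Ang ~ 25.711.
from scipy.constants import eV, angstrom, physical_constants
Ry_J = physical_constants['Rydberg constant times hc in J'][0]
Bohr_m = physical_constants['Bohr radius'][0]
fac_check = Ry_J / eV / Bohr_m * angstrom   # ~ 25.711 (eV/Ang) per (Ry/Bohr)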
Example #35
def test_pw_scf_out():

    # ref data for Structure, all lengths in Ang, energy in eV
    natoms = 2
    symbols = ['Si', 'Si']
    cell = np.array([[-2.71536701, 0.,
                      2.71536701], [0., 2.71536701, 2.71536701],
                     [-2.71536701, 2.71536701, 0.]])
    forces = np.array([[2.57110316, 5.14220632, 7.71330948],
                       [-2.57110316, -5.14220632, -7.71330948]])  # eV / Ang
    nspecies = {'Si': 2}
    mass = np.array([28.0855, 28.0855])  # amu
    cryst_const = np.array([3.84010885, 3.84010885, 3.84010885, 60., 60., 60.])
    symbols_unique = ['Si']
    etot = -258.58148870118305  # eV
    typat = [1, 1]
    volume = 40.041985843396688  # Ang**3
    stress = np.array([[9.825, 0., 0.], [0., 9.825, 0.], [0., 0.,
                                                          9.825]])  # GPa
    coords_frac = np.array([[0., 0., 0.], [0.25, 0.25, 0.25]])
    pressure = 9.825  # GPa
    coords = np.array([[0., 0., 0.], [-1.35768351, 1.35768351, 1.35768351]])
    order = {'Si': 1}
    alat = 10.2626  # Bohr

    filename = 'files/pw.scf.out'
    common.system('gunzip %s.gz' % filename)

    # use_alat=False. Provide high-precision alat from outside (e.g.
    # from pw.in instead of parsing and using low-precision value from pw.out).
    # Here we use the same alat for the tests.
    pp1 = PwSCFOutputFile(
        filename=filename,
        use_alat=False,  # alat=1.0
        units={'length': alat * Bohr / Ang})
    struct1 = pp1.get_struct()  # pp1.parse() called here
    assert_attrs_not_none(struct1)
    assert_attrs_not_none(pp1)
    assert pp1.scf_converged is True
    assert alat == pp1.get_alat(True)
    assert 1.0 == pp1.get_alat(False)

    aaae(cryst_const, struct1.cryst_const)
    aaae(cell, struct1.cell)
    aaae(coords, struct1.coords)
    aaae(coords_frac, struct1.coords_frac)
    aaae(forces, struct1.forces)
    aaae(stress, struct1.stress)
    assert np.allclose(volume, struct1.volume)
    assert np.allclose(etot, struct1.etot)
    assert np.allclose(pressure, struct1.pressure)

    # use_alat=True, alat = 10.2626 Bohr
    pp2 = PwSCFOutputFile(filename=filename, use_alat=True)
    struct2 = pp2.get_struct()  # pp.parse() called here
    assert_attrs_not_none(struct2)
    assert_attrs_not_none(pp2)
    assert np.allclose(alat, pp2.alat)
    assert pp2.scf_converged is True
    assert alat == pp2.get_alat(True)  # Bohr
    assert 1.0 == pp2.get_alat(False)

    # Skip coords and cell b/c they are modified by self.alat and
    # pp1.alat = 1.0, pp2.alat = 10.2626
    attr_lst = common.pop_from_list(pp1.attr_lst, ['coords', 'cell'])
    adae(pp1.__dict__, pp2.__dict__, keys=attr_lst)

    attr_lst = struct1.attr_lst
    adae(struct1.__dict__, struct2.__dict__, keys=attr_lst)

    pp3 = PwSCFOutputFile(filename=filename)
    assert alat == pp3.get_alat()  # self.use_alat=True default

    common.system('gzip %s' % filename)