def dump(self, dump_filename, mkdir=True):
    """Write object to binary file using pickle. Read back with
    :func:`~pwtools.io.read_pickle`."""
    # Dumping with protocol "2" is supposed to be the fastest binary format
    # writing method. Probably, this is platform-specific.
    if mkdir:
        dr = os.path.dirname(dump_filename)
        if dr != '':
            common.makedirs(dr)
    with open(dump_filename, 'wb') as fd:
        pickle.dump(self, fd, protocol=2)
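# Usage sketch (not from the original source): any object exposing the dump()
# method above can be written to disk and restored with
# pwtools.io.read_pickle, as the docstring says. ``obj`` and the path below
# are placeholders.
#
#     from pwtools import io
#     obj.dump('save/obj.pk')             # mkdir=True creates save/ if needed
#     obj = io.read_pickle('save/obj.pk')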
def test_lammps_calculator():
    if not have_ase():
        skip("no ASE found, skipping test")
    elif not have_lmp():
        skip("no lammps found, skipping test")
    else:
        at = get_atoms_with_calc_lammps()
        at.rattle(stdev=0.001, seed=int(time.time()))
        common.makedirs(at.calc.directory)
        print(common.backtick("cp -v utils/lammps/AlN.tersoff {p}/".format(
            p=at.calc.directory)))

        print("scf")
        forces = at.get_forces()
        etot = at.get_potential_energy()
        stress = at.get_stress(voigt=False)  # 3x3
        st = io.read_lammps_md_txt(at.calc.label + '.out')[0]
        assert np.allclose(forces, st.forces)
        assert np.allclose(etot, st.etot)
        # convert ASE stress (eV/Ang^3) to GPa; sign flipped to match the
        # convention of the stress parsed from the LAMMPS output
        assert np.allclose(st.stress, -stress * constants.eV_by_Ang3_to_GPa,
                           atol=1e-10)

        print("relax")
        from ase.optimize import BFGS
        opt = BFGS(at, maxstep=0.04)
        opt.run(fmax=0.001, steps=10)
        coords_frac = parse.arr2d_from_txt("""
            3.3333341909920072e-01 6.6666683819841532e-01 4.4325467247779138e-03
            6.6666681184103216e-01 3.3333362368205072e-01 5.0443254824788963e-01
            3.3333341909918301e-01 6.6666683819838046e-01 3.8356759709402671e-01
            6.6666681184101539e-01 3.3333362368201563e-01 8.8356759861713752e-01
            """)
        assert np.allclose(coords_frac, at.get_scaled_positions(), atol=1e-2)

        # at least one set of backup files must exist
        assert os.path.exists(at.calc.infile + '.0')
        assert os.path.exists(at.calc.outfile + '.0')
        assert os.path.exists(at.calc.dumpfile + '.0')
        assert os.path.exists(at.calc.structfile + '.0')
def write_input(self, mode='a', backup=True, sleep=0, excl=True):
    """
    Create calculation dir(s) for each parameter set and write input files
    based on ``templates``. Write sqlite database storing all relevant
    parameters. Write (bash) shell script to start all calculations (run
    locally or submit batch job file, depending on ``machine.subcmd``).

    Parameters
    ----------
    mode : str, optional
        Fine-tune how to write input files (based on ``templates``) to calc
        dirs calc_foo/0/, calc_foo/1/, ... . Note that this doesn't change
        the base dir calc_foo at all, only the subdirs for each calc.
        {'a', 'w'}

        | 'a': Append mode (default). If a previous database is found, then
        |     subsequent calculations are numbered based on the last 'idx'.
        |     calc_foo/0 # old
        |     calc_foo/1 # old
        |     calc_foo/2 # new
        |     calc_foo/3 # new
        | 'w': Write mode. The target dirs are purged and overwritten. Also,
        |     the database (self.dbfn) is overwritten. Use this to
        |     iteratively tune your inputs, NOT for working on already
        |     present results!
        |     calc_foo/0 # new
        |     calc_foo/1 # new
    backup : bool, optional
        Before writing anything, do a backup of self.calc_dir if it already
        exists.
    sleep : int, optional
        For the script to start (submit) all jobs: time in seconds for the
        shell sleep(1) command.
    excl : bool
        If in append mode, a file <calc_root>/excl_push with all indices of
        calculations from old revisions is written. Can be used with
        ``rsync --exclude-from=excl_push`` when pushing appended new
        calculations to a cluster.
    """
    assert mode in ['a', 'w'], "Unknown mode: '%s'" % mode
    if os.path.exists(self.dbfn):
        if backup:
            common.backup(self.dbfn)
        if mode == 'w':
            os.remove(self.dbfn)
    have_new_db = not os.path.exists(self.dbfn)
    common.makedirs(self.calc_root)
    # this call creates a file ``self.dbfn`` if it doesn't exist
    sqldb = SQLiteDB(self.dbfn, table=self.db_table)
    # max_idx: counter for calc dir numbering
    revision = 0
    if have_new_db:
        max_idx = -1
    else:
        if mode == 'a':
            if sqldb.has_column('idx'):
                max_idx = sqldb.execute("select max(idx) from %s"
                                        % self.db_table).fetchone()[0]
            else:
                raise Exception("database '%s': table '%s' has no "
                                "column 'idx', don't know how to number calcs"
                                % (self.dbfn, self.db_table))
            if sqldb.has_column('revision'):
                revision = int(sqldb.get_single("select max(revision) from %s"
                                                % self.db_table)) + 1
        elif mode == 'w':
            max_idx = -1
    sql_records = []
    hostnames = []
    for imach, machine in enumerate(self.machines):
        hostnames.append(machine.hostname)
        calc_dir = pj(self.calc_root, self.calc_dir_prefix +
                      '_%s' % machine.hostname)
        if os.path.exists(calc_dir):
            if backup:
                common.backup(calc_dir)
            if mode == 'w':
                common.system("rm -r %s" % calc_dir, wait=True)
        run_txt = "here=$(pwd)\n"
        for _idx, params in enumerate(self.params_lst):
            params = common.flatten(params)
            idx = max_idx + _idx + 1
            calc_subdir = pj(calc_dir, str(idx))
            extra_dct = {'revision': revision,
                         'study_name': self.study_name,
                         'idx': idx,
                         'calc_name': self.study_name + "_run%i" % idx,
                         }
            extra_params = [SQLEntry(key=key, sqlval=val) for key, val in
                            extra_dct.items()]
            # templates[:] to copy b/c they may be modified in Calculation
            calc = Calculation(machine=machine,
                               templates=self.templates[:],
                               params=params + extra_params,
                               calc_dir=calc_subdir,
                               )
            if mode == 'w' and os.path.exists(calc_subdir):
                shutil.rmtree(calc_subdir)
            calc.write_input()
            run_txt += "cd %i && %s %s && cd $here && sleep %i\n" % (idx,
                machine.subcmd, machine.get_jobfile_basename(), sleep)
            if imach == 0:
                sql_records.append(calc.get_sql_record())
        common.file_write(pj(calc_dir, 'run.sh'), run_txt)
    for record in sql_records:
        record['hostname'] = SQLEntry(sqlval=','.join(hostnames))
    # for incomplete parameters: collect header parts from all records and
    # make a set = unique entries
    raw_header = [(key, entry.sqltype.upper()) for record in sql_records
                  for key, entry in record.items()]
    header = list(set(raw_header))
    if have_new_db:
        sqldb.create_table(header)
    else:
        for record in sql_records:
            for key, entry in record.items():
                if not sqldb.has_column(key):
                    sqldb.add_column(key, entry.sqltype.upper())
    for record in sql_records:
        cmd = "insert into %s (%s) values (%s)" \
              % (self.db_table,
                 ",".join(record.keys()),
                 ",".join(['?'] * len(record)))
        sqldb.execute(cmd, tuple(entry.sqlval for entry in record.values()))
    if excl and revision > 0 and sqldb.has_column('revision'):
        old_idx_lst = [str(x) for x, in sqldb.execute(
            "select idx from calc where revision < ?", (revision,))]
        common.file_write(pj(self.calc_root, 'excl_push'),
                          '\n'.join(old_idx_lst))
    sqldb.finish()
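# Usage sketch (not from the original source): write_input() is a method of a
# parameter-study object; ``study`` below is a placeholder for an instance
# already set up with machines, templates and params_lst.
#
#     study.write_input()                  # append mode: number new calcs
#                                          # after the last 'idx' in the db
#     study.write_input(mode='w')          # purge dirs + db and start over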
cell_c_min = cell_c.min()
etot = np.array([(a - cell_a_mean)**2.0 + (c - cell_c_mean)**2.0
                 for a, c in axes_flat])
phdos = []
Vmax = V.max()
# phonon dos (just a gaussian) shifted to lower (higher) freqs for higher
# (lower) volume
for ii in range(axes_flat.shape[0]):
    a, c = axes_flat[ii, :]
    fc = 550 - 50 * V[ii] / Vmax
    phdos.append(np.array([freq, gauss(freq - fc, 100) * 0.01]).T)

gibbs = Gibbs(T=T, P=P, etot=etot, phdos=phdos, axes_flat=axes_flat,
              volfunc_ax=volfunc_ax, case=case, dosarea=None)
gibbs.set_fitfunc('C', lambda x, y: num.Spline(x, y, s=None, k=5, eps=1e-5))
g = gibbs.calc_G(calc_all=True)
common.makedirs('../files/gibbs/2d')
io.write_h5('../files/gibbs/2d/%s.h5' % gethostname(), filt_dct(g), mode='w')

# 1d case
case = '1d'
V = np.linspace(10, 20, nax)
axes_flat = V**(1/3.)  # cubic
volfunc_ax = lambda x: x[0]**3.0
etot = (V - V.mean())**2
fcenter = 450 + 100 * (axes_flat - axes_flat.min())
# fake phonon dos data (Gaussian), shift to lower freq for higher volume
phdos = [np.array([freq, gauss(freq - fc, 100)]).T for fc in fcenter[::-1]]
gibbs = Gibbs(T=T, P=P, etot=etot, phdos=phdos, axes_flat=axes_flat,
              volfunc_ax=volfunc_ax, case=case, dosarea=None)