def compute(self, output_dir):
    """Parse the OUTCAR in *output_dir* and return its data as a dict.

    On top of the fields produced by ``LabeledSystem.as_dict()``, the
    per-frame stress tensors (the ``in kB`` lines of OUTCAR) are attached
    under ``data['stress']`` in a serialized-numpy-array layout.

    Returns ``None`` (after a warning) when no OUTCAR file exists.
    """
    outcar = os.path.join(output_dir, 'OUTCAR')
    if not os.path.isfile(outcar):
        dlog.warning("cannot find OUTCAR in " + output_dir + " skip")
        return None
    ls = LabeledSystem(outcar)
    stress = []
    with open(outcar, 'r') as fin:
        # iterate lazily instead of slurping the whole file into memory
        for line in fin:
            if 'in kB' in line:
                # OUTCAR column order after the two header words:
                # XX YY ZZ XY YZ ZX
                sxx, syy, szz, sxy, syz, szx = (
                    float(tok) for tok in line.split()[2:8])
                # assemble the symmetric 3x3 stress tensor
                stress.append([[sxx, sxy, szx],
                               [sxy, syy, syz],
                               [szx, syz, szz]])
    outcar_dict = ls.as_dict()
    outcar_dict['data']['stress'] = {
        "@module": "numpy",
        "@class": "array",
        "dtype": "float64",
        "data": stress,
    }
    return outcar_dict
def testEntry(self):
    """Build an Entry per iteration path and compare the first one
    against the reference entries field by field."""
    entries = []
    for idx, task_dir in enumerate(self.iter_path):
        vasp_input = VaspInput.from_directory(task_dir)
        labeled = LabeledSystem(os.path.join(task_dir, 'OUTCAR'))
        job_attrib = loadfn(os.path.join(task_dir, 'job.json'))
        composition = vasp_input['POSCAR'].structure.composition
        entries.append(
            Entry(composition, 'vasp',
                  vasp_input.as_dict(), labeled.as_dict(),
                  entry_id='pku-' + str(idx), attribute=job_attrib))
    self.assertEqual(len(entries), len(self.ref_entries))
    ret0 = entries[0]
    r0 = self.ref_entries[0]
    # INCAR, KPOINTS, POTCAR and POSCAR must all round-trip identically
    self.assertEqual(Incar.from_dict(ret0.inputs['INCAR']),
                     Incar.from_dict(r0.inputs['INCAR']))
    self.assertEqual(str(r0.inputs['KPOINTS']),
                     str(Kpoints.from_dict(ret0.inputs['KPOINTS'])))
    self.assertEqual(ret0.inputs['POTCAR'], r0.inputs['POTCAR'].as_dict())
    self.assertEqual(Poscar.from_dict(ret0.inputs['POSCAR']).structure,
                     r0.inputs['POSCAR'].structure)
    self.assertEqual(ret0.entry_id, 'pku-0')
def test():
    """Smoke test: build an Entry from the VASP files in the current
    directory, round-trip it through a JSON file, and print both copies.
    """
    # only dumpfn is actually used here; the other imports the original
    # pulled in (loadfn, MontyDecoder/Encoder, PotcarSingle/Potcar) were
    # unused and have been dropped
    from monty.serialization import dumpfn

    vi = VaspInput.from_directory('.')
    ls = LabeledSystem('OUTCAR', fmt='vasp/outcar')
    en0 = Entry('Al', 'vasp', inputs=vi.as_dict(), data=ls.as_dict(),
                entry_id='pku-1')
    print(en0)
    fname = 'pku-1.json'
    dumpfn(en0.as_dict(), fname, indent=4)
    en1 = Entry.load(fname)
    print(en1)
    print(en1.as_dict())
def _parsing_vasp(paths, id_prefix, iters=True):
    """Harvest VASP tasks under *paths* into a list of Entry objects.

    For each task directory the VASP inputs, the labeled OUTCAR frames
    and (optionally) the ``job.json`` attributes are combined into one
    Entry per frame. Unparsable tasks are logged and skipped.

    Parameters
    ----------
    paths : task directories; when *iters* is on they are expected to
        look like ``.../iter.<NNN>/.../task.<config>.<task>``
    id_prefix : prefix for sequential entry ids; falls back to uuid4
        when falsy
    iters : attach ``iter_info``/``task_info`` parsed from the path
        when job attributes are present
    """
    entries = []
    icount = 0
    for path in paths:
        f_outcar = os.path.join(path, 'OUTCAR')
        f_job = os.path.join(path, 'job.json')
        try:
            vi = VaspInput.from_directory(path)
            attrib = loadfn(f_job) if os.path.isfile(f_job) else {}
            if iters and attrib:
                # path leaf looks like task.<iter_info>...<task_info>
                leaf = path.split('/')[-1]
                attrib['iter_info'] = leaf.split('.')[1]
                attrib['task_info'] = leaf.split('.')[-1]
            comp = vi['POSCAR'].structure.composition
            lss = LabeledSystem(f_outcar).to_list()
            for ls in lss:
                if id_prefix:
                    eid = id_prefix + "_" + str(icount)
                else:
                    eid = str(uuid4())
                entry = Entry(comp, 'vasp', vi.as_dict(), ls.as_dict(),
                              attribute=attrib, entry_id=eid)
                entries.append(entry)
                icount += 1
        # was a bare `except:`, which also swallowed KeyboardInterrupt /
        # SystemExit; narrowed to Exception while keeping best-effort skip
        except Exception:
            dlog.info("failed here : %s" % path)
    return entries
def _parsing_vasp(paths, config_info_dict, id_prefix, iters=True):
    """Harvest VASP tasks under *paths* into a list of Entry objects,
    with incremental-run bookkeeping via ``record.database``.

    When *iters* is True, iterations already listed in
    ``record.database`` are skipped — except the most recent one, which
    may still be gaining tasks and is re-scanned — and the record file
    is rewritten afterwards with the iterations seen in this run added.

    Parameters
    ----------
    paths : task directories, expected to look like
        ``.../<sys_info>/iter.<NNNNNN>/02.fp/task.<config>.<task>``
    config_info_dict : maps a config label to the collection of integer
        config indices belonging to it
    id_prefix : prefix for sequential entry ids; falls back to uuid4
        when falsy
    iters : enable the record.database bookkeeping described above
    """
    entries = []
    icount = 0
    # define these unconditionally: the loop below consults iter_record
    # even when iters is False (previously that raised NameError on every
    # path, which the broad except silently turned into "failed" logs)
    iter_record = []
    iter_record_new = []
    if iters:
        try:
            with open("record.database", "r") as f_record:
                iter_record = [ln.split()[0] for ln in f_record]
            iter_record.sort()
            dlog.info("iter_record")
            dlog.info(iter_record)
        except Exception:
            # first run (no record yet) or malformed record: start fresh
            pass
    for path in paths:
        try:
            f_outcar = os.path.join(path, 'OUTCAR')
            f_job = os.path.join(path, 'job.json')
            # e.g. generator/Cu/iter.000031/02.fp/task.007.000000
            tmp_iter = path.split('/')[-3]
            # skip fully processed iterations; the newest recorded one
            # may still be growing, so it is re-scanned
            if (tmp_iter in iter_record) and (tmp_iter != iter_record[-1]):
                continue
            # only remember iterations not already in the record file, so
            # the rewritten record.database holds each iteration once
            # (previously the newest iteration was duplicated every run)
            if tmp_iter not in iter_record and tmp_iter not in iter_record_new:
                iter_record_new.append(tmp_iter)
            vi = VaspInput.from_directory(path)
            attrib = loadfn(f_job) if os.path.isfile(f_job) else {}
            if iters and attrib:
                tmp_ = path.split('/')[-1]
                task_info = tmp_.split('.')[-1]
                iter_info = tmp_iter.split('.')[-1]
                sys_info = path.split('/')[-4]
                config_info_int = int(tmp_.split('.')[1])
                config_info = None
                for key, value in config_info_dict.items():
                    if config_info_int in value:
                        config_info = key
                if config_info is None:
                    # previously an unmatched config index left config_info
                    # unbound, so the NameError silently dropped the task;
                    # make the skip explicit instead
                    dlog.info("no config_info match for %s, skip" % path)
                    continue
                attrib['config_info'] = config_info
                attrib['task_info'] = task_info
                attrib['iter_info'] = iter_info
                attrib['sys_info'] = sys_info
                # pull run metadata straight out of OUTCAR
                with open(f_outcar, "r") as fin_outcar:
                    for line in fin_outcar:
                        if "running on" in line:
                            attrib["core"] = int(line.split()[2])
                        if "Elapse" in line:
                            attrib["wall_time"] = float(line.split()[-1])
                        if "executed on" in line:
                            attrib["date"] = line.split()[-2]
                            attrib["clocktime"] = line.split()[-1]
                dlog.info("Attrib")
                dlog.info(attrib)
            comp = vi['POSCAR'].structure.composition
            lss = LabeledSystem(f_outcar).to_list()
            for ls in lss:
                if id_prefix:
                    eid = id_prefix + "_" + str(icount)
                else:
                    eid = str(uuid4())
                entry = Entry(comp, 'vasp', vi.as_dict(), ls.as_dict(),
                              attribute=attrib, entry_id=eid)
                entries.append(entry)
                icount += 1
        except Exception:
            # best-effort harvesting: a broken task must not abort the run
            dlog.info("failed for %s" % (path))
    if iters:
        iter_record.sort()
        iter_record_new.sort()
        with open("record.database", "w") as fw:
            for line in iter_record:
                fw.write(line + "\n")
            for line in iter_record_new:
                fw.write(line + "\n")
    return entries