def add_data_entry(self, path):
    """Parse the data found in path and add it to self.data."""
    try:
        xml = Vasprun(os.path.join(path, "vasprun.xml"))
        out = Outcar(os.path.join(path, "OUTCAR"))
        if xml.converged or True:  # currently accepts unconverged runs as well
            entry = {
                'system': path.split('/')[1].split('_par')[0],
                'NPAR': xml.parameters.get('NPAR'),
                'nband': xml.parameters.get('NBANDS'),
                'ncpus': int(out.run_stats['cores']),
                'final_energy': xml.final_energy,
                'vasp_version': xml.vasp_version,
                'generator': xml.generator,
                'generator_hash': hash(frozenset(xml.generator)),
                'run_stats': out.run_stats
            }
            entry_hash = hash((entry['ncpus'], entry['NPAR'],
                               entry['generator_hash'], entry['system']))
            # print(entry)
            self.data.update({str(entry_hash): entry})
            print(entry['ncpus'], entry['NPAR'],
                  entry['generator_hash'], entry['system'])
    except (ParseError, ValueError, IOError):
        # Fall back to the OUTCAR/INCAR pair when vasprun.xml is unusable.
        try:
            out = Outcar(os.path.join(path, "OUTCAR"))
            inc = Incar(os.path.join(path, "INCAR"))
            entry = {
                'system': path.split('/')[1].split('_par')[0],
                'NPAR': inc.as_dict()['NPAR'],
                'nband': inc.as_dict()['nband'],
                'ncpus': int(out.run_stats['cores']),
                'final_energy': -1,
                'vasp_version': 'v',
                'generator': {},
                'generator_hash': hash(' '),
                'run_stats': out.run_stats
            }
            entry_hash = hash((entry['ncpus'], entry['NPAR'],
                               entry['generator_hash'], entry['system']))
            log(entry)
            self.data.update({str(entry_hash): entry})
            print(entry['ncpus'], entry['NPAR'],
                  entry['generator_hash'], entry['system'])
        except (ParseError, ValueError, IOError):
            print('parsing error')
def run_task(self, fw_spec):
    try:
        vasp_run = Vasprun("vasprun.xml", parse_dos=False,
                           parse_eigen=False)
        outcar = Outcar(os.path.join(os.getcwd(), "OUTCAR"))
    except Exception as e:
        raise RuntimeError("Can't get valid results from relaxed run: " +
                           str(e))

    user_incar_settings = MPNonSCFVaspInputSet.get_incar_settings(vasp_run,
                                                                  outcar)
    user_incar_settings.update({"NPAR": 2})
    structure = MPNonSCFVaspInputSet.get_structure(vasp_run, outcar,
                                                   initial_structure=True)

    if self.line:
        mpnscfvip = MPNonSCFVaspInputSet(user_incar_settings, mode="Line")
        for k, v in mpnscfvip.get_all_vasp_input(
                structure, generate_potcar=True).items():
            v.write_file(os.path.join(os.getcwd(), k))
        kpath = HighSymmKpath(structure)
    else:
        mpnscfvip = MPNonSCFVaspInputSet(user_incar_settings, mode="Uniform")
        for k, v in mpnscfvip.get_all_vasp_input(
                structure, generate_potcar=True).items():
            v.write_file(os.path.join(os.getcwd(), k))

    if self.line:
        return FWAction(stored_data={"kpath": kpath.kpath,
                                     "kpath_name": kpath.name})
    else:
        return FWAction()
def from_previous_vasp_run(previous_vasp_dir, output_dir='.',
                           user_incar_settings=None,
                           make_dir_if_not_present=True):
    """
    Generate a set of VASP input files for static calculations from a
    directory of a previous VASP run.

    Args:
        previous_vasp_dir: The directory containing the outputs
            (vasprun.xml and OUTCAR) of the previous VASP run.
        output_dir: The directory to write the VASP input files for the
            static calculations. Defaults to the current directory.
        user_incar_settings: A dict of customized INCAR settings.
        make_dir_if_not_present: Set to True if you want the directory
            (and the whole path) to be created if it is not present.
    """
    try:
        vasp_run = Vasprun(os.path.join(previous_vasp_dir, "vasprun.xml"),
                           parse_dos=False, parse_eigen=None)
        outcar = Outcar(os.path.join(previous_vasp_dir, "OUTCAR"))
    except:
        traceback.print_exc()
        raise RuntimeError("Can't get valid results from previous run")

    structure = MPStaticVaspInputSet.get_structure(vasp_run, outcar)
    mpsvip = MPStaticVaspInputSet(user_incar_settings=user_incar_settings)
    mpsvip.write_input(structure, output_dir, make_dir_if_not_present)
def get_fermi_energy(self, directory):
    """Return the Fermi energy parsed from a run directory."""
    abspath = '%s/%s/' % (self.directory, directory)
    if 'vasprun.xml' in os.listdir(abspath):
        return Vasprun('%s/vasprun.xml' % abspath).efermi
    elif 'OUTCAR' in os.listdir(abspath):
        return Outcar('%s/OUTCAR' % abspath).efermi
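# Hedged standalone sketch of the same fallback logic without a class
# wrapper: prefer vasprun.xml for the Fermi level and fall back to OUTCAR.
# The run directory used in the example is a placeholder; the import path
# matches the older pymatgen layout used elsewhere in this file (newer
# releases expose the same classes from pymatgen.io.vasp.outputs).
import os
from pymatgen.io.vaspio.vasp_output import Vasprun, Outcar


def fermi_energy_from_dir(run_dir):
    """Return efermi from vasprun.xml if present, else from OUTCAR, else None."""
    xml_path = os.path.join(run_dir, "vasprun.xml")
    outcar_path = os.path.join(run_dir, "OUTCAR")
    if os.path.exists(xml_path):
        return Vasprun(xml_path).efermi
    if os.path.exists(outcar_path):
        return Outcar(outcar_path).efermi
    return None


# Example usage (placeholder path):
# print(fermi_energy_from_dir("runs/static"))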
def test_single_atom(self):
    filepath = os.path.join(test_dir, "OUTCAR.Al")
    outcar = Outcar(filepath)
    expected_mag = ({u'p': 0.0, u's': 0.0, u'd': 0.0, u'tot': 0.0},)
    expected_chg = ({u'p': 0.343, u's': 0.425, u'd': 0.0, u'tot': 0.768},)

    self.assertAlmostEqual(outcar.magnetization, expected_mag)
    self.assertAlmostEqual(outcar.charge, expected_chg)
    self.assertFalse(outcar.is_stopped)
    self.assertEqual(outcar.run_stats,
                     {'System time (sec)': 0.592,
                      'Total CPU time used (sec)': 50.194,
                      'Elapsed time (sec)': 52.337,
                      'Maximum memory used (kb)': 62900.0,
                      'Average memory used (kb)': 0.0,
                      'User time (sec)': 49.602,
                      'cores': '32'})
    self.assertAlmostEqual(outcar.efermi, 8.0942)
    self.assertAlmostEqual(outcar.nelect, 3)
    self.assertAlmostEqual(outcar.total_mag, 8.2e-06)
    self.assertIsNotNone(outcar.as_dict())
def test_init(self):
    for f in ['OUTCAR', 'OUTCAR.gz']:
        filepath = os.path.join(test_dir, f)
        outcar = Outcar(filepath)
        expected_mag = ({'d': 0.0, 'p': 0.003, 's': 0.002, 'tot': 0.005},
                        {'d': 0.798, 'p': 0.008, 's': 0.007, 'tot': 0.813},
                        {'d': 0.798, 'p': 0.008, 's': 0.007, 'tot': 0.813},
                        {'d': 0.0, 'p': -0.117, 's': 0.005, 'tot': -0.112},
                        {'d': 0.0, 'p': -0.165, 's': 0.004, 'tot': -0.162},
                        {'d': 0.0, 'p': -0.117, 's': 0.005, 'tot': -0.112},
                        {'d': 0.0, 'p': -0.165, 's': 0.004, 'tot': -0.162})
        expected_chg = ({'p': 0.154, 's': 0.078, 'd': 0.0, 'tot': 0.232},
                        {'p': 0.707, 's': 0.463, 'd': 8.316, 'tot': 9.486},
                        {'p': 0.707, 's': 0.463, 'd': 8.316, 'tot': 9.486},
                        {'p': 3.388, 's': 1.576, 'd': 0.0, 'tot': 4.964},
                        {'p': 3.365, 's': 1.582, 'd': 0.0, 'tot': 4.947},
                        {'p': 3.388, 's': 1.576, 'd': 0.0, 'tot': 4.964},
                        {'p': 3.365, 's': 1.582, 'd': 0.0, 'tot': 4.947})

        self.assertAlmostEqual(outcar.magnetization, expected_mag, 5,
                               "Wrong magnetization read from Outcar")
        self.assertAlmostEqual(outcar.charge, expected_chg, 5,
                               "Wrong charge read from Outcar")
        self.assertFalse(outcar.is_stopped)
        self.assertEqual(outcar.run_stats,
                         {'System time (sec)': 0.938,
                          'Total CPU time used (sec)': 545.142,
                          'Elapsed time (sec)': 546.709,
                          'Maximum memory used (kb)': 0.0,
                          'Average memory used (kb)': 0.0,
                          'User time (sec)': 544.204,
                          'cores': '8'})
        self.assertAlmostEqual(outcar.efermi, 2.0112)
        self.assertAlmostEqual(outcar.nelect, 44.9999991)
        self.assertAlmostEqual(outcar.total_mag, 0.9999998)
        self.assertIsNotNone(outcar.as_dict())

    filepath = os.path.join(test_dir, 'OUTCAR.stopped')
    outcar = Outcar(filepath)
    self.assertTrue(outcar.is_stopped)
def run_task(self, fw_spec):
    user_incar_settings = {"NPAR": 2}
    MPStaticVaspInputSet.from_previous_vasp_run(
        os.getcwd(), user_incar_settings=user_incar_settings)
    structure = MPStaticVaspInputSet.get_structure(
        Vasprun("vasprun.xml"), Outcar("OUTCAR"),
        initial_structure=False, additional_info=True)
    return FWAction(stored_data={
        'refined_structure': structure[1][0].to_dict,
        'conventional_standard_structure': structure[1][1].to_dict,
        'symmetry_dataset': structure[1][2],
        'symmetry_operations': [x.to_dict for x in structure[1][3]]})
def test_init(self):
    filepath = os.path.join(test_dir, 'OUTCAR')
    outcar = Outcar(filepath)
    expected_mag = ({'d': 0.0, 'p': 0.003, 's': 0.002, 'tot': 0.005},
                    {'d': 0.798, 'p': 0.008, 's': 0.007, 'tot': 0.813},
                    {'d': 0.798, 'p': 0.008, 's': 0.007, 'tot': 0.813},
                    {'d': 0.0, 'p': -0.117, 's': 0.005, 'tot': -0.112},
                    {'d': 0.0, 'p': -0.165, 's': 0.004, 'tot': -0.162},
                    {'d': 0.0, 'p': -0.117, 's': 0.005, 'tot': -0.112},
                    {'d': 0.0, 'p': -0.165, 's': 0.004, 'tot': -0.162})
    expected_chg = ({'p': 0.154, 's': 0.078, 'd': 0.0, 'tot': 0.232},
                    {'p': 0.707, 's': 0.463, 'd': 8.316, 'tot': 9.486},
                    {'p': 0.707, 's': 0.463, 'd': 8.316, 'tot': 9.486},
                    {'p': 3.388, 's': 1.576, 'd': 0.0, 'tot': 4.964},
                    {'p': 3.365, 's': 1.582, 'd': 0.0, 'tot': 4.947},
                    {'p': 3.388, 's': 1.576, 'd': 0.0, 'tot': 4.964},
                    {'p': 3.365, 's': 1.582, 'd': 0.0, 'tot': 4.947})

    self.assertAlmostEqual(outcar.magnetization, expected_mag, 5,
                           "Wrong magnetization read from Outcar")
    self.assertAlmostEqual(outcar.charge, expected_chg, 5,
                           "Wrong charge read from Outcar")
    self.assertFalse(outcar.is_stopped)
    self.assertEqual(outcar.run_stats,
                     {'System time (sec)': 0.938,
                      'Total CPU time used (sec)': 545.142,
                      'Elapsed time (sec)': 546.709,
                      'Maximum memory used (kb)': 0.0,
                      'Average memory used (kb)': 0.0,
                      'User time (sec)': 544.204})
    self.assertAlmostEqual(outcar.efermi, 2.0112)
    self.assertAlmostEqual(outcar.nelect, 44.9999991)
    self.assertAlmostEqual(outcar.total_mag, 0.9999998)
    self.assertIsNotNone(outcar.to_dict)

    filepath = os.path.join(test_dir, 'OUTCAR.stopped')
    outcar = Outcar(filepath)
    self.assertTrue(outcar.is_stopped)
def test_single_atom(self):
    filepath = os.path.join(test_dir, "OUTCAR.Al")
    outcar = Outcar(filepath)
    expected_mag = ({u'p': 0.0, u's': 0.0, u'd': 0.0, u'tot': 0.0}, )
    expected_chg = ({u'p': 0.343, u's': 0.425, u'd': 0.0, u'tot': 0.768}, )

    self.assertAlmostEqual(outcar.magnetization, expected_mag)
    self.assertAlmostEqual(outcar.charge, expected_chg)
    self.assertFalse(outcar.is_stopped)
    self.assertEqual(outcar.run_stats,
                     {'System time (sec)': 0.592,
                      'Total CPU time used (sec)': 50.194,
                      'Elapsed time (sec)': 52.337,
                      'Maximum memory used (kb)': 62900.0,
                      'Average memory used (kb)': 0.0,
                      'User time (sec)': 49.602,
                      'cores': '32'})
    self.assertAlmostEqual(outcar.efermi, 8.0942)
    self.assertAlmostEqual(outcar.nelect, 3)
    self.assertAlmostEqual(outcar.total_mag, 8.2e-06)
    self.assertIsNotNone(outcar.as_dict())
def from_previous_vasp_run(previous_vasp_dir, output_dir='.',
                           mode="Uniform", user_incar_settings=None,
                           copy_chgcar=True, make_dir_if_not_present=True):
    """
    Generate a set of VASP input files for NonSCF calculations from a
    directory of a previous static VASP run.

    Args:
        previous_vasp_dir: The directory containing the outputs
            (vasprun.xml and OUTCAR) of the previous VASP run.
        output_dir: The directory to write the VASP input files for the
            NonSCF calculations. Defaults to the current directory.
        mode: "Uniform" or "Line" mode for the NonSCF calculation.
            Defaults to "Uniform".
        user_incar_settings: A dict of customized INCAR settings.
        copy_chgcar: Whether to copy CHGCAR from the SC run. Defaults to
            True.
        make_dir_if_not_present: Set to True if you want the directory
            (and the whole path) to be created if it is not present.
    """
    try:
        vasp_run = Vasprun(os.path.join(previous_vasp_dir, "vasprun.xml"),
                           parse_dos=False, parse_eigen=None)
        outcar = Outcar(os.path.join(previous_vasp_dir, "OUTCAR"))
    except:
        traceback.print_exc()
        raise RuntimeError("Can't get valid results from previous run")

    # Get a Magmom-decorated structure
    structure = MPNonSCFVaspInputSet.get_structure(vasp_run, outcar)
    user_incar_settings = MPNonSCFVaspInputSet.get_incar_settings(vasp_run,
                                                                  outcar)
    mpnscfvip = MPNonSCFVaspInputSet(user_incar_settings, mode)
    mpnscfvip.write_input(structure, output_dir, make_dir_if_not_present)

    if copy_chgcar:
        try:
            shutil.copyfile(os.path.join(previous_vasp_dir, "CHGCAR"),
                            os.path.join(output_dir, "CHGCAR"))
        except Exception as e:
            traceback.print_exc()
            raise RuntimeError("Can't copy CHGCAR from SC run" + '\n' +
                               str(e))
def test_core_state_eigen(self):
    filepath = os.path.join(test_dir, "OUTCAR.CL")
    cl = Outcar(filepath).read_core_state_eigen()
    self.assertAlmostEqual(cl[6]["2s"][-1], -174.4779)
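# Hedged usage sketch following the test above: read_core_state_eigen()
# returns one dict per atom, keyed by core-state label (e.g. "2s"), each
# mapping to a list of eigenvalues; the test indexes atom 6 and takes the
# last value. The OUTCAR path below is a placeholder for a run that wrote
# core-state eigenvalues.
from pymatgen.io.vaspio.vasp_output import Outcar

core_levels = Outcar("OUTCAR").read_core_state_eigen()
for atom_index, levels in enumerate(core_levels):
    for label, eigenvalues in levels.items():
        print(atom_index, label, eigenvalues[-1])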
def test_init(self):
    for f in ['OUTCAR', 'OUTCAR.gz']:
        filepath = os.path.join(test_dir, f)
        outcar = Outcar(filepath)
        expected_mag = ({'d': 0.0, 'p': 0.003, 's': 0.002, 'tot': 0.005},
                        {'d': 0.798, 'p': 0.008, 's': 0.007, 'tot': 0.813},
                        {'d': 0.798, 'p': 0.008, 's': 0.007, 'tot': 0.813},
                        {'d': 0.0, 'p': -0.117, 's': 0.005, 'tot': -0.112},
                        {'d': 0.0, 'p': -0.165, 's': 0.004, 'tot': -0.162},
                        {'d': 0.0, 'p': -0.117, 's': 0.005, 'tot': -0.112},
                        {'d': 0.0, 'p': -0.165, 's': 0.004, 'tot': -0.162})
        expected_chg = ({'p': 0.154, 's': 0.078, 'd': 0.0, 'tot': 0.232},
                        {'p': 0.707, 's': 0.463, 'd': 8.316, 'tot': 9.486},
                        {'p': 0.707, 's': 0.463, 'd': 8.316, 'tot': 9.486},
                        {'p': 3.388, 's': 1.576, 'd': 0.0, 'tot': 4.964},
                        {'p': 3.365, 's': 1.582, 'd': 0.0, 'tot': 4.947},
                        {'p': 3.388, 's': 1.576, 'd': 0.0, 'tot': 4.964},
                        {'p': 3.365, 's': 1.582, 'd': 0.0, 'tot': 4.947})

        self.assertAlmostEqual(outcar.magnetization, expected_mag, 5,
                               "Wrong magnetization read from Outcar")
        self.assertAlmostEqual(outcar.charge, expected_chg, 5,
                               "Wrong charge read from Outcar")
        self.assertFalse(outcar.is_stopped)
        self.assertEqual(outcar.run_stats,
                         {'System time (sec)': 0.938,
                          'Total CPU time used (sec)': 545.142,
                          'Elapsed time (sec)': 546.709,
                          'Maximum memory used (kb)': 0.0,
                          'Average memory used (kb)': 0.0,
                          'User time (sec)': 544.204,
                          'cores': '8'})
        self.assertAlmostEqual(outcar.efermi, 2.0112)
        self.assertAlmostEqual(outcar.nelect, 44.9999991)
        self.assertAlmostEqual(outcar.total_mag, 0.9999998)
        self.assertIsNotNone(outcar.as_dict())

    filepath = os.path.join(test_dir, 'OUTCAR.stopped')
    outcar = Outcar(filepath)
    self.assertTrue(outcar.is_stopped)

    for f in ['OUTCAR.lepsilon', 'OUTCAR.lepsilon.gz']:
        filepath = os.path.join(test_dir, f)
        outcar = Outcar(filepath)
        outcar.read_lepsilon()
        outcar.read_lepsilon_ionic()
        self.assertAlmostEqual(outcar.dielectric_tensor[0][0], 3.716432)
        self.assertAlmostEqual(outcar.dielectric_tensor[0][1], -0.20464)
        self.assertAlmostEqual(outcar.dielectric_tensor[1][2], -0.20464)
        self.assertAlmostEqual(outcar.dielectric_ionic_tensor[0][0], 0.001419)
        self.assertAlmostEqual(outcar.dielectric_ionic_tensor[0][2], 0.001419)
        self.assertAlmostEqual(outcar.dielectric_ionic_tensor[2][2], 0.001419)
        self.assertAlmostEqual(outcar.piezo_tensor[0][0], 0.52799)
        self.assertAlmostEqual(outcar.piezo_tensor[1][3], 0.35998)
        self.assertAlmostEqual(outcar.piezo_tensor[2][5], 0.35997)
        self.assertAlmostEqual(outcar.piezo_ionic_tensor[0][0], 0.05868)
        self.assertAlmostEqual(outcar.piezo_ionic_tensor[1][3], 0.06241)
        self.assertAlmostEqual(outcar.piezo_ionic_tensor[2][5], 0.06242)
        self.assertAlmostEqual(outcar.born[0][1][2], -0.385)
        self.assertAlmostEqual(outcar.born[1][2][0], 0.36465)
import pymatgen
from pymatgen.io.vaspio.vasp_output import Outcar
import os
import pickle
import numpy as np

outcar = Outcar('OUTCAR')
magarray = []
magx = outcar.magnetizationx
magy = outcar.magnetizationy
magz = outcar.magnetizationz
magxlist = [x["tot"] for x in magx]
magylist = [y["tot"] for y in magy]
magzlist = [z["tot"] for z in magz]
strlist = []
for i in range(0, len(magxlist)):
    # Change this if statement to select only the magnetic species of
    # interest. This example was used for a 48-atom unit cell where atoms
    # 41-48 were magnetic.
    if i >= 0:
        strlist.append(str(magxlist[i]) + " " + str(magylist[i]) + " " +
                       str(magzlist[i]))
magstr = " ".join(strlist)
print(magstr)
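# Hedged continuation sketch (not part of the original script): numpy is
# imported above but unused, so one plausible follow-up is to stack the
# per-site moments into an (n_sites, 3) array and save it. The output file
# name is a placeholder.
magvecs = np.array([magxlist, magylist, magzlist]).T
np.savetxt("site_moments.txt", magvecs)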
def test_init(self):
    for f in ['OUTCAR', 'OUTCAR.gz']:
        filepath = os.path.join(test_dir, f)
        outcar = Outcar(filepath)
        expected_mag = ({'d': 0.0, 'p': 0.003, 's': 0.002, 'tot': 0.005},
                        {'d': 0.798, 'p': 0.008, 's': 0.007, 'tot': 0.813},
                        {'d': 0.798, 'p': 0.008, 's': 0.007, 'tot': 0.813},
                        {'d': 0.0, 'p': -0.117, 's': 0.005, 'tot': -0.112},
                        {'d': 0.0, 'p': -0.165, 's': 0.004, 'tot': -0.162},
                        {'d': 0.0, 'p': -0.117, 's': 0.005, 'tot': -0.112},
                        {'d': 0.0, 'p': -0.165, 's': 0.004, 'tot': -0.162})
        expected_chg = ({'p': 0.154, 's': 0.078, 'd': 0.0, 'tot': 0.232},
                        {'p': 0.707, 's': 0.463, 'd': 8.316, 'tot': 9.486},
                        {'p': 0.707, 's': 0.463, 'd': 8.316, 'tot': 9.486},
                        {'p': 3.388, 's': 1.576, 'd': 0.0, 'tot': 4.964},
                        {'p': 3.365, 's': 1.582, 'd': 0.0, 'tot': 4.947},
                        {'p': 3.388, 's': 1.576, 'd': 0.0, 'tot': 4.964},
                        {'p': 3.365, 's': 1.582, 'd': 0.0, 'tot': 4.947})

        self.assertAlmostEqual(outcar.magnetization, expected_mag, 5,
                               "Wrong magnetization read from Outcar")
        self.assertAlmostEqual(outcar.charge, expected_chg, 5,
                               "Wrong charge read from Outcar")
        self.assertFalse(outcar.is_stopped)
        self.assertEqual(outcar.run_stats,
                         {'System time (sec)': 0.938,
                          'Total CPU time used (sec)': 545.142,
                          'Elapsed time (sec)': 546.709,
                          'Maximum memory used (kb)': 0.0,
                          'Average memory used (kb)': 0.0,
                          'User time (sec)': 544.204,
                          'cores': '8'})
        self.assertAlmostEqual(outcar.efermi, 2.0112)
        self.assertAlmostEqual(outcar.nelect, 44.9999991)
        self.assertAlmostEqual(outcar.total_mag, 0.9999998)
        self.assertIsNotNone(outcar.as_dict())

    filepath = os.path.join(test_dir, 'OUTCAR.stopped')
    outcar = Outcar(filepath)
    self.assertTrue(outcar.is_stopped)

    for f in ['OUTCAR.lepsilon', 'OUTCAR.lepsilon.gz']:
        filepath = os.path.join(test_dir, f)
        outcar = Outcar(filepath)
        outcar.read_lepsilon()
        outcar.read_lepsilon_ionic()
        self.assertAlmostEqual(outcar.dielectric_tensor[0][0], 3.716432)
        self.assertAlmostEqual(outcar.dielectric_tensor[0][1], -0.20464)
        self.assertAlmostEqual(outcar.dielectric_tensor[1][2], -0.20464)
        self.assertAlmostEqual(outcar.dielectric_ionic_tensor[0][0], 0.001419)
        self.assertAlmostEqual(outcar.dielectric_ionic_tensor[0][2], 0.001419)
        self.assertAlmostEqual(outcar.dielectric_ionic_tensor[2][2], 0.001419)
        self.assertAlmostEqual(outcar.piezo_tensor[0][0], 0.52799)
        self.assertAlmostEqual(outcar.piezo_tensor[1][3], 0.35998)
        self.assertAlmostEqual(outcar.piezo_tensor[2][5], 0.35997)
        self.assertAlmostEqual(outcar.piezo_ionic_tensor[0][0], 0.05868)
        self.assertAlmostEqual(outcar.piezo_ionic_tensor[1][3], 0.06241)
        self.assertAlmostEqual(outcar.piezo_ionic_tensor[2][5], 0.06242)
        self.assertAlmostEqual(outcar.born[0][1][2], -0.385)
        self.assertAlmostEqual(outcar.born[1][2][0], 0.36465)
def from_previous_vasp_run(previous_vasp_dir, output_dir='.',
                           user_incar_settings=None,
                           make_dir_if_not_present=True):
    """
    Generate a set of VASP input files for static calculations from a
    directory of a previous VASP run.

    Args:
        previous_vasp_dir: The directory containing the outputs
            (vasprun.xml and OUTCAR) of the previous VASP run.
        output_dir: The directory to write the VASP input files for the
            static calculations. Defaults to the current directory.
        user_incar_settings: A dict of customized INCAR settings.
        make_dir_if_not_present: Set to True if you want the directory
            (and the whole path) to be created if it is not present.
    """
    try:
        vasp_run = Vasprun(os.path.join(previous_vasp_dir, "vasprun.xml"),
                           parse_dos=False, parse_eigen=None)
        outcar = Outcar(os.path.join(previous_vasp_dir, "OUTCAR"))
        previous_incar = vasp_run.incar
        previous_kpoints = vasp_run.kpoints
        previous_final_structure = vasp_run.final_structure
    except:
        traceback.print_exc()
        raise RuntimeError("Can't get valid results from previous run")

    structure = MPStaticVaspInputSet.get_structure(vasp_run, outcar)
    mpsvip = MPStaticVaspInputSet()
    mpsvip.write_input(structure, output_dir, make_dir_if_not_present)
    # new_incar = Incar.from_file(os.path.join(output_dir, "INCAR"))
    new_incar = mpsvip.get_incar(structure)

    # Use previous run INCAR and override necessary parameters
    previous_incar.update({"IBRION": -1, "ISMEAR": -5, "LAECHG": True,
                           "LCHARG": True, "LORBIT": 11, "LVHAR": True,
                           "LWAVE": False, "NSW": 0, "ICHARG": 0})

    for incar_key in ["MAGMOM", "NUPDOWN"]:
        if new_incar.get(incar_key, None):
            previous_incar.update({incar_key: new_incar[incar_key]})
        else:
            previous_incar.pop(incar_key, None)

    # use new LDAUU when possible b/c the Poscar might have changed
    # representation
    if previous_incar.get('LDAU'):
        u = previous_incar.get('LDAUU', [])
        j = previous_incar.get('LDAUJ', [])
        if sum([u[x] - j[x] for x, y in enumerate(u)]) > 0:
            for tag in ('LDAUU', 'LDAUL', 'LDAUJ'):
                previous_incar.update({tag: new_incar[tag]})

    # Compare ediff between previous and staticinputset values,
    # choose the tighter ediff
    previous_incar.update({"EDIFF": min(previous_incar.get("EDIFF", 1),
                                        new_incar["EDIFF"])})

    # add user settings
    if user_incar_settings:
        previous_incar.update(user_incar_settings)
    previous_incar.write_file(os.path.join(output_dir, "INCAR"))

    # Prefer to use k-point scheme from previous run
    previous_kpoints_density = np.prod(previous_kpoints.kpts[0]) / \
        previous_final_structure.lattice.reciprocal_lattice.volume
    new_kpoints_density = max(previous_kpoints_density, 90)
    new_kpoints = mpsvip.get_kpoints(structure,
                                     kpoints_density=new_kpoints_density)
    if previous_kpoints.style[0] != new_kpoints.style[0]:
        # Keep a Monkhorst-Pack mesh (with even divisions) when the previous
        # run used one and the lattice is not hexagonal; otherwise fall back
        # to a Gamma-centered mesh.
        if previous_kpoints.style[0] == "M" and \
                SymmetryFinder(structure, 0.01).get_lattice_type() != \
                "hexagonal":
            k_div = (kp + 1 if kp % 2 == 1 else kp
                     for kp in new_kpoints.kpts[0])
            Kpoints.monkhorst_automatic(k_div). \
                write_file(os.path.join(output_dir, "KPOINTS"))
        else:
            Kpoints.gamma_automatic(new_kpoints.kpts[0]). \
                write_file(os.path.join(output_dir, "KPOINTS"))
    else:
        new_kpoints.write_file(os.path.join(output_dir, "KPOINTS"))
def assimilate(self, path, launches_coll=None):
    """
    Parses vasp runs, inserts the result into the db, and returns the
    task_id or doc of the insertion.

    Returns:
        If in simulate_mode, the entire doc is returned for debugging
        purposes. Else, only the task_id of the inserted doc is returned.
    """
    d = self.get_task_doc(path)
    if self.additional_fields:
        # always add additional fields, even for failed jobs
        d.update(self.additional_fields)

    try:
        d["dir_name_full"] = d["dir_name"].split(":")[1]
        d["dir_name"] = get_block_part(d["dir_name_full"])
        d["stored_data"] = {}
    except:
        print('COULD NOT GET DIR NAME')
        pprint.pprint(d)
        print(traceback.format_exc())
        raise ValueError('IMPROPER PARSING OF {}'.format(path))

    if not self.simulate:
        # Perform actual insertion into db. Because db connections cannot
        # be pickled, every insertion needs to create a new connection
        # to the db.
        conn = MongoClient(self.host, self.port)
        db = conn[self.database]
        if self.user:
            db.authenticate(self.user, self.password)
        coll = db[self.collection]

        # Insert dos data into gridfs and then remove it from the dict.
        # DOS data tends to be above the 4Mb limit for mongo docs. A ref
        # to the dos file is in the dos_fs_id.
        result = coll.find_one({"dir_name": d["dir_name"]})

        if result is None or self.update_duplicates:
            if self.parse_dos and "calculations" in d:
                for calc in d["calculations"]:
                    if "dos" in calc:
                        dos = json.dumps(calc["dos"], cls=MontyEncoder)
                        fs = gridfs.GridFS(db, "dos_fs")
                        dosid = fs.put(dos)
                        calc["dos_fs_id"] = dosid
                        del calc["dos"]

            d["last_updated"] = datetime.datetime.today()

            if result is None:
                if ("task_id" not in d) or (not d["task_id"]):
                    d["task_id"] = "mp-{}".format(
                        db.counter.find_and_modify(
                            query={"_id": "taskid"},
                            update={"$inc": {"c": 1}})["c"])
                logger.info("Inserting {} with taskid = {}"
                            .format(d["dir_name"], d["task_id"]))
            elif self.update_duplicates:
                d["task_id"] = result["task_id"]
                logger.info("Updating {} with taskid = {}"
                            .format(d["dir_name"], d["task_id"]))

            # Fireworks processing
            self.process_fw(path, d)

            try:
                # Add oxide_type
                struct = Structure.from_dict(d["output"]["crystal"])
                d["oxide_type"] = oxide_type(struct)
            except:
                logger.error("can't get oxide_type for {}".format(d["task_id"]))
                d["oxide_type"] = None

            # Override incorrect outcar subdocs for two step relaxations
            if "optimize structure" in d['task_type'] and \
                    os.path.exists(os.path.join(path, "relax2")):
                try:
                    run_stats = {}
                    for i in [1, 2]:
                        o_path = os.path.join(path, "relax" + str(i), "OUTCAR")
                        o_path = o_path if os.path.exists(o_path) else o_path + ".gz"
                        outcar = Outcar(o_path)
                        d["calculations"][i - 1]["output"]["outcar"] = outcar.as_dict()
                        run_stats["relax" + str(i)] = outcar.run_stats
                except:
                    logger.error("Bad OUTCAR for {}.".format(path))

                try:
                    overall_run_stats = {}
                    for key in ["Total CPU time used (sec)", "User time (sec)",
                                "System time (sec)", "Elapsed time (sec)"]:
                        overall_run_stats[key] = sum(
                            [v[key] for v in run_stats.values()])
                    run_stats["overall"] = overall_run_stats
                except:
                    logger.error("Bad run stats for {}.".format(path))

                d["run_stats"] = run_stats

            # add is_compatible
            mpc = MaterialsProjectCompatibility("Advanced")
            try:
                func = d["pseudo_potential"]["functional"]
                labels = d["pseudo_potential"]["labels"]
                symbols = ["{} {}".format(func, label) for label in labels]
                parameters = {"run_type": d["run_type"],
                              "is_hubbard": d["is_hubbard"],
                              "hubbards": d["hubbards"],
                              "potcar_symbols": symbols}
                entry = ComputedEntry(Composition(d["unit_cell_formula"]),
                                      0.0, 0.0, parameters=parameters,
                                      entry_id=d["task_id"])
                d['is_compatible'] = bool(mpc.process_entry(entry))
            except:
                traceback.print_exc()
                print('ERROR in getting compatibility')
                d['is_compatible'] = None

            # task_type dependent processing
            if 'static' in d['task_type']:
                launch_doc = launches_coll.find_one(
                    {"fw_id": d['fw_id'],
                     "launch_dir": {"$regex": d["dir_name"]}},
                    {"action.stored_data": 1})
                for i in ["conventional_standard_structure",
                          "symmetry_operations", "symmetry_dataset",
                          "refined_structure"]:
                    try:
                        d['stored_data'][i] = launch_doc['action']['stored_data'][i]
                    except:
                        pass

            # parse band structure if necessary
            if ('band structure' in d['task_type'] or
                    "Uniform" in d['task_type']) and \
                    d['state'] == 'successful':
                launch_doc = launches_coll.find_one(
                    {"fw_id": d['fw_id'],
                     "launch_dir": {"$regex": d["dir_name"]}},
                    {"action.stored_data": 1})
                vasp_run = Vasprun(zpath(os.path.join(path, "vasprun.xml")),
                                   parse_projected_eigen=False)

                if 'band structure' in d['task_type']:
                    def string_to_numlist(stringlist):
                        g = re.search(
                            r'([0-9\-\.eE]+)\s+([0-9\-\.eE]+)\s+([0-9\-\.eE]+)',
                            stringlist)
                        return [float(g.group(i)) for i in range(1, 4)]

                    for i in ["kpath_name", "kpath"]:
                        d['stored_data'][i] = launch_doc['action']['stored_data'][i]
                    kpoints_doc = d['stored_data']['kpath']['kpoints']
                    for i in kpoints_doc:
                        kpoints_doc[i] = string_to_numlist(kpoints_doc[i])
                    bs = vasp_run.get_band_structure(
                        efermi=d['calculations'][0]['output']['outcar']['efermi'],
                        line_mode=True)
                else:
                    bs = vasp_run.get_band_structure(
                        efermi=d['calculations'][0]['output']['outcar']['efermi'],
                        line_mode=False)

                bs_json = json.dumps(bs.as_dict(), cls=MontyEncoder)
                fs = gridfs.GridFS(db, "band_structure_fs")
                bs_id = fs.put(bs_json)
                d['calculations'][0]["band_structure_fs_id"] = bs_id

                # also override band gap in task doc
                gap = bs.get_band_gap()
                vbm = bs.get_vbm()
                cbm = bs.get_cbm()
                update_doc = {'bandgap': gap['energy'], 'vbm': vbm['energy'],
                              'cbm': cbm['energy'],
                              'is_gap_direct': gap['direct']}
                d['analysis'].update(update_doc)
                d['calculations'][0]['output'].update(update_doc)

            coll.update({"dir_name": d["dir_name"]}, d, upsert=True)
            return d["task_id"], d
        else:
            logger.info("Skipping duplicate {}".format(d["dir_name"]))
            return result["task_id"], result
    else:
        d["task_id"] = 0
        logger.info("Simulated insert into database for {} with task_id {}"
                    .format(d["dir_name"], d["task_id"]))
        return 0, d
def from_previous_vasp_run(previous_vasp_dir, output_dir='.',
                           user_incar_settings=None, copy_chgcar=True,
                           make_dir_if_not_present=True,
                           kpoints_density=1000):
    """
    Generate a set of VASP input files for NonSCF calculations from a
    directory of a previous static VASP run.

    Args:
        previous_vasp_dir (str): The directory containing the outputs
            (vasprun.xml and OUTCAR) of the previous VASP run.
        output_dir (str): The directory to write the VASP input files for
            the NonSCF calculations. Defaults to the current directory.
        user_incar_settings (dict): A dict specifying customized settings
            for INCAR.
        copy_chgcar (bool): Whether to copy CHGCAR from the SC run.
            Defaults to True.
        make_dir_if_not_present (bool): Set to True if you want the
            directory (and the whole path) to be created if it is not
            present.
        kpoints_density (int): kpoints density for the reciprocal cell of
            the structure. Might need to increase the default value when
            calculating metallic materials.
    """
    user_incar_settings = user_incar_settings or {}

    try:
        vasp_run = Vasprun(os.path.join(previous_vasp_dir, "vasprun.xml"),
                           parse_dos=False, parse_eigen=None)
        outcar = Outcar(os.path.join(previous_vasp_dir, "OUTCAR"))
        previous_incar = vasp_run.incar
    except:
        traceback.print_exc()
        raise RuntimeError("Can't get valid results from previous run. "
                           "prev dir: {}".format(previous_vasp_dir))

    # Get a Magmom-decorated structure
    structure = TetrahedronDosSet.get_structure(vasp_run, outcar,
                                                initial_structure=True)
    nscf_incar_settings = TetrahedronDosSet.get_incar_settings(vasp_run,
                                                               outcar)
    mpnscfvip = TetrahedronDosSet(nscf_incar_settings,
                                  kpoints_density=kpoints_density)
    mpnscfvip.write_input(structure, output_dir, make_dir_if_not_present)

    if copy_chgcar:
        try:
            shutil.copyfile(os.path.join(previous_vasp_dir, "CHGCAR"),
                            os.path.join(output_dir, "CHGCAR"))
        except Exception as e:
            traceback.print_exc()
            raise RuntimeError("Can't copy CHGCAR from SC run" + '\n' +
                               str(e))

    # Overwrite necessary INCAR parameters from previous runs
    # (this is already done in the __init__; is it necessary here?)
    previous_incar.update(nscf_incar_settings)
    previous_incar.update(user_incar_settings)
    # MAKE EXTRA DUPER SURE THESE PARAMETERS ARE NOT OVERWRITTEN
    previous_incar.update({"IBRION": -1, "ISMEAR": -5, "LCHARG": False,
                           "LORBIT": 11, "LWAVE": False, "NSW": 0,
                           "ISYM": 0, "ICHARG": 11})
    previous_incar.pop("MAGMOM", None)
    previous_incar.pop('SIGMA', None)
    previous_incar.write_file(os.path.join(output_dir, "INCAR"))

    # Perform checking on INCAR parameters
    if any([previous_incar.get("NSW", 0) != 0,
            previous_incar["IBRION"] != -1,
            previous_incar["ICHARG"] != 11,
            any([sum(previous_incar["LDAUU"]) <= 0,
                 previous_incar["LMAXMIX"] < 4])
            if previous_incar.get("LDAU") else False]):
        raise ValueError("Incompatible INCAR parameters!")
def assimilate(self, path, launches_coll=None):
    """
    Parses vasp runs, inserts the result into the db, and returns the
    task_id or doc of the insertion.

    Returns:
        If in simulate_mode, the entire doc is returned for debugging
        purposes. Else, only the task_id of the inserted doc is returned.
    """
    d = self.get_task_doc(path, self.parse_dos, self.additional_fields)

    try:
        d["dir_name_full"] = d["dir_name"].split(":")[1]
        d["dir_name"] = get_block_part(d["dir_name_full"])
        d["stored_data"] = {}
    except:
        print('COULD NOT GET DIR NAME')
        pprint.pprint(d)
        print(traceback.format_exc())
        raise ValueError('IMPROPER PARSING OF {}'.format(path))

    if not self.simulate:
        # Perform actual insertion into db. Because db connections cannot
        # be pickled, every insertion needs to create a new connection
        # to the db.
        conn = MongoClient(self.host, self.port)
        db = conn[self.database]
        if self.user:
            db.authenticate(self.user, self.password)
        coll = db[self.collection]

        # Insert dos data into gridfs and then remove it from the dict.
        # DOS data tends to be above the 4Mb limit for mongo docs. A ref
        # to the dos file is in the dos_fs_id.
        result = coll.find_one({"dir_name": d["dir_name"]})

        if result is None or self.update_duplicates:
            if self.parse_dos and "calculations" in d:
                for calc in d["calculations"]:
                    if "dos" in calc:
                        dos = json.dumps(calc["dos"])
                        fs = gridfs.GridFS(db, "dos_fs")
                        dosid = fs.put(dos)
                        calc["dos_fs_id"] = dosid
                        del calc["dos"]

            d["last_updated"] = datetime.datetime.today()

            if result is None:
                if ("task_id" not in d) or (not d["task_id"]):
                    d["task_id"] = "mp-{}".format(
                        db.counter.find_and_modify(
                            query={"_id": "taskid"},
                            update={"$inc": {"c": 1}})["c"])
                logger.info("Inserting {} with taskid = {}"
                            .format(d["dir_name"], d["task_id"]))
            elif self.update_duplicates:
                d["task_id"] = result["task_id"]
                logger.info("Updating {} with taskid = {}"
                            .format(d["dir_name"], d["task_id"]))

            # Fireworks processing
            self.process_fw(path, d)

            # Override incorrect outcar subdocs for two step relaxations
            if "optimize structure" in d['task_type'] and \
                    os.path.exists(os.path.join(path, "relax2")):
                try:
                    run_stats = {}
                    for i in [1, 2]:
                        outcar = Outcar(os.path.join(path, "relax" + str(i),
                                                     "OUTCAR"))
                        d["calculations"][i - 1]["output"]["outcar"] = outcar.to_dict
                        run_stats["relax" + str(i)] = outcar.run_stats
                except:
                    logger.error("Bad OUTCAR for {}.".format(path))

                try:
                    overall_run_stats = {}
                    for key in ["Total CPU time used (sec)", "User time (sec)",
                                "System time (sec)", "Elapsed time (sec)"]:
                        overall_run_stats[key] = sum(
                            [v[key] for v in run_stats.values()])
                    run_stats["overall"] = overall_run_stats
                except:
                    logger.error("Bad run stats for {}.".format(path))

                d["run_stats"] = run_stats

            # task_type dependent processing
            if 'static' in d['task_type']:
                launch_doc = launches_coll.find_one(
                    {"fw_id": d['fw_id'],
                     "launch_dir": {"$regex": d["dir_name"]}},
                    {"action.stored_data": 1})
                for i in ["conventional_standard_structure",
                          "symmetry_operations", "symmetry_dataset",
                          "refined_structure"]:
                    try:
                        d['stored_data'][i] = launch_doc['action']['stored_data'][i]
                    except:
                        pass

            # parse band structure if necessary
            if ('band structure' in d['task_type'] or
                    "Uniform" in d['task_type']) and \
                    d['state'] == 'successful':
                launch_doc = launches_coll.find_one(
                    {"fw_id": d['fw_id'],
                     "launch_dir": {"$regex": d["dir_name"]}},
                    {"action.stored_data": 1})
                vasp_run = Vasprun(os.path.join(path, "vasprun.xml"),
                                   parse_projected_eigen=False)

                if 'band structure' in d['task_type']:
                    def string_to_numlist(stringlist):
                        g = re.search(
                            r'([0-9\-\.eE]+)\s+([0-9\-\.eE]+)\s+([0-9\-\.eE]+)',
                            stringlist)
                        return [float(g.group(i)) for i in range(1, 4)]

                    for i in ["kpath_name", "kpath"]:
                        d['stored_data'][i] = launch_doc['action']['stored_data'][i]
                    kpoints_doc = d['stored_data']['kpath']['kpoints']
                    for i in kpoints_doc:
                        kpoints_doc[i] = string_to_numlist(kpoints_doc[i])
                    bs = vasp_run.get_band_structure(
                        efermi=d['calculations'][0]['output']['outcar']['efermi'],
                        line_mode=True)
                else:
                    bs = vasp_run.get_band_structure(
                        efermi=d['calculations'][0]['output']['outcar']['efermi'],
                        line_mode=False)

                bs_json = json.dumps(bs.to_dict)
                fs = gridfs.GridFS(db, "band_structure_fs")
                bs_id = fs.put(bs_json)
                d['calculations'][0]["band_structure_fs_id"] = bs_id

            coll.update({"dir_name": d["dir_name"]}, d, upsert=True)
            return d["task_id"], d
        else:
            logger.info("Skipping duplicate {}".format(d["dir_name"]))
            return result["task_id"], result
    else:
        d["task_id"] = 0
        logger.info("Simulated insert into database for {} with task_id {}"
                    .format(d["dir_name"], d["task_id"]))
        return 0, d