def test_nicht_konv(self):
    """nicht_konv should be detected and corrected by enabling LREAL."""
    handler = VaspErrorHandler("vasp.nicht_konvergent")
    handler.natoms_large_cell = 5
    self.assertEqual(handler.check(), True)
    self.assertEqual(handler.correct()["errors"], ["nicht_konv"])
    incar = Incar.from_file("INCAR")
    self.assertEqual(incar["LREAL"], True)
def test_brmix(self):
    """Walk the brmix error through its successive correction stages."""
    handler = VaspErrorHandler("vasp.brmix")
    self.assertEqual(handler.check(), True)

    # Stage 1 (no good OUTCAR): IMIX is set, CHGCAR is kept.
    result = handler.correct()
    self.assertEqual(result["errors"], ['brmix'])
    vasp_input = VaspInput.from_directory(".")
    self.assertEqual(vasp_input["INCAR"]["IMIX"], 1)
    self.assertTrue(os.path.exists("CHGCAR"))

    # Stage 2: IMIX removed; a Gamma k-mesh must not be all-even.
    handler.correct()
    vasp_input = VaspInput.from_directory(".")
    self.assertFalse("IMIX" in vasp_input["INCAR"])
    self.assertTrue(os.path.exists("CHGCAR"))
    kpoints = vasp_input["KPOINTS"]
    if kpoints.style == Kpoints.supported_modes.Gamma and kpoints.num_kpts < 1:
        self.assertFalse(all(n % 2 == 0 for n in kpoints.kpts[0]))

    # Stage 3: symmetry switched off and CHGCAR deleted.
    handler.correct()
    vasp_input = VaspInput.from_directory(".")
    self.assertEqual(vasp_input["INCAR"]["ISYM"], 0)
    self.assertFalse(os.path.exists("CHGCAR"))

    # With NELECT present, brmix must not be flagged at all.
    shutil.copy("INCAR.nelect", "INCAR")
    handler = VaspErrorHandler("vasp.brmix")
    self.assertEqual(handler.check(), False)
    result = handler.correct()
    self.assertEqual(result["errors"], [])
def test_dentet(self):
    """dentet should switch the smearing scheme to ISMEAR = 0."""
    handler = VaspErrorHandler("vasp.dentet")
    handler.check()
    result = handler.correct()
    self.assertEqual(result["errors"], ['dentet'])
    self.assertEqual(
        result["actions"],
        [{'action': {'_set': {'ISMEAR': 0}}, 'dict': 'INCAR'}],
    )
def test_eddrmm(self):
    """eddrmm is fixed first via ALGO = Normal, then by lowering POTIM."""
    handler = VaspErrorHandler("vasp.eddrmm")
    self.assertEqual(handler.check(), True)
    self.assertEqual(handler.correct()["errors"], ["eddrmm"])
    self.assertEqual(Incar.from_file("INCAR")["ALGO"], "Normal")
    self.assertEqual(handler.correct()["errors"], ["eddrmm"])
    self.assertEqual(Incar.from_file("INCAR")["POTIM"], 0.25)
def test_rhosyg(self):
    """rhosyg first loosens SYMPREC, then disables symmetry entirely."""
    handler = VaspErrorHandler("vasp.rhosyg")
    self.assertEqual(handler.check(), True)
    self.assertEqual(handler.correct()["errors"], ["rhosyg"])
    self.assertEqual(Incar.from_file("INCAR")["SYMPREC"], 1e-4)
    self.assertEqual(handler.correct()["errors"], ["rhosyg"])
    self.assertEqual(Incar.from_file("INCAR")["ISYM"], 0)
def test_first_step(self):
    """zpotrf on the very first ionic step should scale the cell up."""
    shutil.copy("OSZICAR.empty", "OSZICAR")
    original = Structure.from_file("POSCAR")
    handler = VaspErrorHandler("vasp.out")
    self.assertEqual(handler.check(), True)
    result = handler.correct()
    self.assertEqual(result['errors'], ['zpotrf'])
    corrected = Structure.from_file("POSCAR")
    # Each lattice vector is expected to be scaled by 1.2.
    self.assertAlmostEqual(corrected.volume, original.volume * 1.2 ** 3, 3)
def test_potim_correction(self):
    """zpotrf after one ionic step keeps the volume but reduces POTIM."""
    shutil.copy("OSZICAR.one_step", "OSZICAR")
    original = Structure.from_file("POSCAR")
    handler = VaspErrorHandler("vasp.out")
    self.assertEqual(handler.check(), True)
    result = handler.correct()
    self.assertEqual(result['errors'], ['zpotrf'])
    corrected = Structure.from_file("POSCAR")
    self.assertAlmostEqual(corrected.volume, original.volume, 3)
    self.assertAlmostEqual(Incar.from_file("INCAR")['POTIM'], 0.25)
def test_too_few_bands(self):
    """too_few_bands should raise NBANDS."""
    os.chdir(os.path.join(test_dir, "too_few_bands"))
    shutil.copy("INCAR", "INCAR.orig")
    handler = VaspErrorHandler("vasp.too_few_bands")
    handler.check()
    result = handler.correct()
    self.assertEqual(result["errors"], ['too_few_bands'])
    self.assertEqual(
        result["actions"],
        [{'action': {'_set': {'NBANDS': 501}}, 'dict': 'INCAR'}],
    )
    # Clean up the error archive and restore the original INCAR.
    os.remove("error.1.tar.gz")
    shutil.move("INCAR.orig", "INCAR")
def test_rot_matrix(self):
    """rot_matrix detected from a bad POSCAR should be corrected.

    Restores the pristine KPOINTS and removes the error archive afterwards.
    """
    if "PMG_VASP_PSP_DIR" not in os.environ:
        os.environ["PMG_VASP_PSP_DIR"] = test_dir
    subdir = os.path.join(test_dir, "poscar_error")
    os.chdir(subdir)
    shutil.copy("KPOINTS", "KPOINTS.orig")
    h = VaspErrorHandler()
    h.check()
    d = h.correct()
    self.assertEqual(d["errors"], ["rot_matrix"])
    os.remove(os.path.join(subdir, "error.1.tar.gz"))
    # Restore KPOINTS in one step; the previous copy-then-remove pair
    # is exactly what shutil.move does.
    shutil.move("KPOINTS.orig", "KPOINTS")
def test_aliasing(self):
    """aliasing should bump NGX and delete CHGCAR/WAVECAR."""
    os.chdir(os.path.join(test_dir, "aliasing"))
    shutil.copy("INCAR", "INCAR.orig")
    handler = VaspErrorHandler("vasp.aliasing")
    handler.check()
    result = handler.correct()
    # Restore state before asserting so failures don't leave debris.
    shutil.move("INCAR.orig", "INCAR")
    clean_dir()
    os.chdir(test_dir)

    self.assertEqual(result["errors"], ['aliasing'])
    expected_actions = [
        {'action': {'_set': {'NGX': 34}}, 'dict': 'INCAR'},
        {"file": "CHGCAR", "action": {"_file_delete": {'mode': "actual"}}},
        {"file": "WAVECAR", "action": {"_file_delete": {'mode': "actual"}}},
    ]
    self.assertEqual(result["actions"], expected_actions)
def test_static_run_correction(self):
    """zpotrf in a static run (NSW=0 or ISIF<=2) keeps volume, sets ISYM=0."""
    shutil.copy("OSZICAR.empty", "OSZICAR")
    original = Structure.from_file("POSCAR")
    incar = Incar.from_file("INCAR")

    # Case 1: pure static run (NSW = 0).
    incar.update({"NSW": 0})
    incar.write_file("INCAR")
    handler = VaspErrorHandler("vasp.out")
    self.assertEqual(handler.check(), True)
    result = handler.correct()
    self.assertEqual(result['errors'], ['zpotrf'])
    corrected = Structure.from_file("POSCAR")
    self.assertAlmostEqual(corrected.volume, original.volume, 3)
    self.assertEqual(Incar.from_file("INCAR")["ISYM"], 0)

    # Case 2: relaxation with ISIF in the 0-2 range.
    incar.update({"NSW": 99, "ISIF": 2})
    incar.write_file("INCAR")
    handler = VaspErrorHandler("vasp.out")
    self.assertEqual(handler.check(), True)
    result = handler.correct()
    self.assertEqual(result['errors'], ['zpotrf'])
    corrected = Structure.from_file("POSCAR")
    self.assertAlmostEqual(corrected.volume, original.volume, 3)
    self.assertEqual(Incar.from_file("INCAR")["ISYM"], 0)
def test_brmix(self):
    """brmix removes the CHGCAR; with NELECT present it is not flagged."""
    handler = VaspErrorHandler("vasp.brmix")
    self.assertEqual(handler.check(), True)
    result = handler.correct()
    self.assertEqual(result["errors"], ['brmix'])
    self.assertFalse(os.path.exists("CHGCAR"))

    shutil.copy("INCAR.nelect", "INCAR")
    handler = VaspErrorHandler("vasp.brmix")
    self.assertEqual(handler.check(), False)
    result = handler.correct()
    self.assertEqual(result["errors"], [])
def test_subspace(self):
    """subspacematrix first disables LREAL, then escalates PREC."""
    handler = VaspErrorHandler("vasp.subspace")
    handler.check()
    result = handler.correct()
    self.assertEqual(result["errors"], ['subspacematrix'])
    self.assertEqual(
        result["actions"],
        [{'action': {'_set': {'LREAL': False}}, 'dict': 'INCAR'}],
    )

    # 2nd error should set PREC to accurate.
    handler.check()
    result = handler.correct()
    self.assertEqual(result["errors"], ['subspacematrix'])
    self.assertEqual(
        result["actions"],
        [{'action': {'_set': {'PREC': 'Accurate'}}, 'dict': 'INCAR'}],
    )
def test_brmix(self):
    """brmix: CHGCAR survival depends on the accumulated error count."""
    handler = VaspErrorHandler("vasp.brmix")
    self.assertEqual(handler.check(), True)
    result = handler.correct()
    self.assertEqual(result["errors"], ['brmix'])
    vasp_input = VaspInput.from_directory(".")

    # The CHGCAR survives only the first brmix correction.
    if handler.error_count['brmix'] > 1:
        self.assertFalse(os.path.exists("CHGCAR"))
    else:
        self.assertTrue(os.path.exists("CHGCAR"))

    kpoints = vasp_input["KPOINTS"]
    if kpoints.style == Kpoints.supported_modes.Gamma and kpoints.num_kpts < 1:
        self.assertFalse(all(n % 2 == 0 for n in kpoints.kpts[0]))

    shutil.copy("INCAR.nelect", "INCAR")
    handler = VaspErrorHandler("vasp.brmix")
    self.assertEqual(handler.check(), False)
    result = handler.correct()
    self.assertEqual(result["errors"], [])
def test_check_correct(self):
    """Spot-check tet, rot_matrix (POSCAR+INCAR) and real_optlay fixes."""
    handler = VaspErrorHandler("vasp.teterror")
    handler.check()
    result = handler.correct()
    self.assertEqual(result["errors"], ['tet'])
    self.assertEqual(
        result["actions"],
        [{'action': {'_set': {'ISMEAR': 0}}, 'dict': 'INCAR'}],
    )

    handler = VaspErrorHandler("vasp.classrotmat")
    handler.check()
    result = handler.correct()
    self.assertEqual(result["errors"], ['rot_matrix'])
    self.assertEqual({action["dict"] for action in result["actions"]},
                     {"POSCAR", "INCAR"})

    handler = VaspErrorHandler("vasp.real_optlay")
    handler.check()
    result = handler.correct()
    self.assertEqual(result["errors"], ['real_optlay'])
    self.assertEqual(
        result["actions"],
        [{'action': {'_set': {'LREAL': False}}, 'dict': 'INCAR'}],
    )
import sys
from custodian.custodian import Custodian
from custodian.vasp.handlers import VaspErrorHandler, \
    UnconvergedErrorHandler, AliasingErrorHandler, FrozenJobErrorHandler, \
    PositiveEnergyErrorHandler, MeshSymmetryErrorHandler
from custodian.vasp.jobs import VaspJob

# Standard handler set for a plain VASP run.
handlers = [
    VaspErrorHandler(),
    UnconvergedErrorHandler(),
    AliasingErrorHandler(),
    FrozenJobErrorHandler(),
    PositiveEnergyErrorHandler(),
    MeshSymmetryErrorHandler(),
]

# The VASP command is taken verbatim from the CLI arguments.
jobs = [VaspJob(sys.argv[1:])]

c = Custodian(handlers, jobs, max_errors=10)
c.run()
def test_rot_matrix_vasp6(self):
    """VASP 6 sgrcon output should map onto the rot_matrix error."""
    handler = VaspErrorHandler("vasp6.sgrcon")
    self.assertEqual(handler.check(), True)
    self.assertEqual(handler.correct()["errors"], ["rot_matrix"])
def test_to_from_dict(self):
    """Round-tripping via as_dict/from_dict must preserve the handler."""
    handler = VaspErrorHandler("random_name")
    restored = VaspErrorHandler.from_dict(handler.as_dict())
    self.assertEqual(type(restored), type(handler))
    self.assertEqual(restored.output_filename, "random_name")
def test_point_group(self):
    """point_group should disable symmetry (ISYM = 0)."""
    handler = VaspErrorHandler("vasp.point_group")
    self.assertEqual(handler.check(), True)
    self.assertEqual(handler.correct()["errors"], ["point_group"])
    self.assertEqual(Incar.from_file("INCAR")["ISYM"], 0)
def run_task(self, fw_spec):
    """Run a VASP job under Custodian with a configurable handler group."""
    handler_groups = {
        "default": [
            VaspErrorHandler(), MeshSymmetryErrorHandler(),
            UnconvergedErrorHandler(), NonConvergingErrorHandler(),
            PotimErrorHandler(), PositiveEnergyErrorHandler(),
            FrozenJobErrorHandler(), StdErrHandler(), DriftErrorHandler(),
        ],
        "strict": [
            VaspErrorHandler(), MeshSymmetryErrorHandler(),
            UnconvergedErrorHandler(), NonConvergingErrorHandler(),
            PotimErrorHandler(), PositiveEnergyErrorHandler(),
            FrozenJobErrorHandler(), StdErrHandler(),
            AliasingErrorHandler(), DriftErrorHandler(),
        ],
        "md": [VaspErrorHandler(), NonConvergingErrorHandler()],
        "no_handler": [],
    }

    # Resolve the VASP command; a single string is env-expanded and split.
    vasp_cmd = env_chk(self["vasp_cmd"], fw_spec)
    if isinstance(vasp_cmd, six.string_types):
        vasp_cmd = os.path.expandvars(vasp_cmd)
        vasp_cmd = shlex.split(vasp_cmd)

    # Custodian configuration.
    scratch_dir = env_chk(self.get("scratch_dir"), fw_spec)
    gzip_output = self.get("gzip_output", True)
    max_errors = self.get("max_errors", 5)
    auto_npar = env_chk(self.get("auto_npar"), fw_spec,
                        strict=False, default=False)
    gamma_vasp_cmd = env_chk(self.get("gamma_vasp_cmd"), fw_spec,
                             strict=False, default=None)

    jobs = [VaspJob(vasp_cmd, auto_npar=auto_npar,
                    gamma_vasp_cmd=gamma_vasp_cmd)]

    # construct handlers
    handlers = handler_groups[self.get("handler_group", "default")]
    validators = []

    c = Custodian(handlers, jobs, validators=validators,
                  max_errors=max_errors, scratch_dir=scratch_dir,
                  gzipped_output=gzip_output)
    c.run()
def test_symprec_noise(self):
    """symprec_noise should tighten SYMPREC to 1e-6."""
    handler = VaspErrorHandler("vasp.symprec_noise")
    self.assertEqual(handler.check(), True)
    self.assertEqual(handler.correct()["errors"], ["symprec_noise"])
    self.assertEqual(Incar.from_file("INCAR")["SYMPREC"], 1e-6)
def test_check_correct(self):
    """Spot-check tet, rot_matrix (KPOINTS-only) and real_optlay fixes."""
    handler = VaspErrorHandler("vasp.teterror")
    handler.check()
    result = handler.correct()
    self.assertEqual(result["errors"], ['tet'])
    self.assertEqual(
        result["actions"],
        [{'action': {'_set': {'ISMEAR': 0}}, 'dict': 'INCAR'}],
    )

    handler = VaspErrorHandler("vasp.sgrcon")
    handler.check()
    result = handler.correct()
    self.assertEqual(result["errors"], ['rot_matrix'])
    self.assertEqual({action["dict"] for action in result["actions"]},
                     {"KPOINTS"})

    handler = VaspErrorHandler("vasp.real_optlay")
    handler.check()
    result = handler.correct()
    self.assertEqual(result["errors"], ['real_optlay'])
    self.assertEqual(
        result["actions"],
        [{'action': {'_set': {'LREAL': False}}, 'dict': 'INCAR'}],
    )
def test_posmap_vasp6(self):
    """VASP 6 posmap output should tighten SYMPREC."""
    handler = VaspErrorHandler("vasp6.posmap")
    self.assertEqual(handler.check(), True)
    self.assertEqual(handler.correct()["errors"], ["posmap"])
    self.assertAlmostEqual(Incar.from_file("INCAR")["SYMPREC"], 1e-6)
def test_gradient_not_orthogonal(self):
    """grad_not_orth should switch smearing to ISMEAR = 0."""
    handler = VaspErrorHandler("vasp.gradient_not_orthogonal")
    self.assertEqual(handler.check(), True)
    self.assertEqual(handler.correct()["errors"], ["grad_not_orth"])
    self.assertEqual(Incar.from_file("INCAR")["ISMEAR"], 0)
def test_edddav(self):
    """edddav should delete the CHGCAR."""
    handler = VaspErrorHandler("vasp.edddav")
    self.assertEqual(handler.check(), True)
    self.assertEqual(handler.correct()["errors"], ["edddav"])
    self.assertFalse(os.path.exists("CHGCAR"))
def test_pssyevx(self):
    """pssyevx should fall back to ALGO = Normal."""
    handler = VaspErrorHandler("vasp.pssyevx")
    self.assertEqual(handler.check(), True)
    self.assertEqual(handler.correct()["errors"], ["pssyevx"])
    self.assertEqual(Incar.from_file("INCAR")["ALGO"], "Normal")
def structure_to_wf(structure):
    """
    This method starts with a Structure object and creates a Workflow object
    The workflow has two steps - a structure relaxation and a static run

    :param structure: pymatgen Structure to relax and then run statically
    :return: a FireWorks Workflow (relax -> DB insert -> static -> DB insert)
    """
    fws = []  # list of FireWorks to run
    connections = defaultdict(list)  # dependencies between FireWorks

    # generate VASP input objects for 1st VASP run - this is put in the FW spec
    mpvis = MPRelaxSet(structure,
                       user_incar_settings={'NCORE': 8, "GGAU": False})
    incar = mpvis.incar
    poscar = mpvis.poscar
    # BUG FIX: kpoints and potcar were both previously assigned mpvis.poscar,
    # which serialized the POSCAR into the kpoints/potcar spec entries.
    kpoints = mpvis.kpoints
    potcar = mpvis.potcar

    # serialize the VASP input objects to the FW spec
    spec = {}
    spec['vasp'] = {}
    spec['vasp']['incar'] = incar.as_dict()
    spec['vasp']['poscar'] = poscar.as_dict()
    spec['vasp']['kpoints'] = kpoints.as_dict()
    spec['vasp']['potcar'] = potcar.as_dict()
    spec['vaspinputset_name'] = mpvis.__class__.__name__
    spec['task_type'] = 'GGA optimize structure (2x) example'

    # set up the custodian that we want to run
    jobs = VaspJob.double_relaxation_run('')
    for j in jobs:
        # turn off auto npar, it doesn't work for >1 node
        j.auto_npar = False
    handlers = [VaspErrorHandler(), FrozenJobErrorHandler(),
                MeshSymmetryErrorHandler(), NonConvergingErrorHandler()]
    c_params = {'jobs': [j.as_dict() for j in jobs],
                'handlers': [h.as_dict() for h in handlers],
                'max_errors': 5}
    custodiantask = VaspCustodianTaskEx(c_params)

    # 1st Firework - run GGA optimize structure
    # VaspWriterTask - write input files (INCAR, POSCAR, KPOINTS, POSCAR) based on spec
    # CustodianTaskEx - run VASP within a custodian
    tasks = [VaspWriterTask(), custodiantask]
    fws.append(
        Firework(tasks, spec, name=get_name(structure, spec['task_type']),
                 fw_id=1))

    # 2nd Firework - insert previous run into DB
    spec = {'task_type': 'VASP db insertion example'}
    fws.append(
        Firework([VaspToDBTaskEx()], spec,
                 name=get_name(structure, spec['task_type']), fw_id=2))
    connections[1] = [2]

    # 3rd Firework - static run.
    # VaspCopyTask - copy output from previous run to this directory
    # SetupStaticRunTask - override old parameters for static run
    # CustodianTaskEx - run VASP within a custodian
    spec = {'task_type': 'GGA static example'}
    copytask = VaspCopyTask({'use_CONTCAR': True, 'skip_CHGCAR': True})
    setuptask = SetupStaticRunTask()
    custodiantask = VaspCustodianTaskEx(
        {'jobs': [VaspJob('', auto_npar=False).as_dict()],
         'handlers': [h.as_dict() for h in handlers],
         'max_errors': 5})
    fws.append(
        Firework([copytask, setuptask, custodiantask], spec,
                 name=get_name(structure, spec['task_type']), fw_id=3))
    connections[2] = [3]

    # 4th Firework - insert previous run into DB
    spec = {'task_type': 'VASP db insertion example'}
    fws.append(
        Firework([VaspToDBTaskEx()], spec,
                 name=get_name(structure, spec['task_type']), fw_id=4))
    connections[3] = [4]

    return Workflow(fws, connections, name=get_slug(structure.formula))
def __init__(self, parameters):
    """Deserialize jobs, handlers and max_errors from the parameter dict."""
    self.update(parameters)
    self.jobs = [VaspJob.from_dict(job_spec) for job_spec in self['jobs']]
    self.handlers = [VaspErrorHandler.from_dict(handler_spec)
                     for handler_spec in self['handlers']]
    # Default to a single allowed error if not configured.
    self.max_errors = self.get('max_errors', 1)
def test_check_correct(self):
    """Check tet/rot_matrix/real_optlay plus the large-cell LREAL path."""
    handler = VaspErrorHandler("vasp.teterror")
    handler.check()
    result = handler.correct()
    self.assertEqual(result["errors"], ['tet'])
    self.assertEqual(
        result["actions"],
        [{'action': {'_set': {'ISMEAR': 0}}, 'dict': 'INCAR'}],
    )

    handler = VaspErrorHandler("vasp.sgrcon")
    handler.check()
    result = handler.correct()
    self.assertEqual(result["errors"], ['rot_matrix'])
    self.assertEqual({action["dict"] for action in result["actions"]},
                     {"KPOINTS"})

    handler = VaspErrorHandler("vasp.real_optlay")
    handler.check()
    result = handler.correct()
    self.assertEqual(result["errors"], ['real_optlay'])
    self.assertEqual(
        result["actions"],
        [{'action': {'_set': {'LREAL': False}}, 'dict': 'INCAR'}],
    )

    # Large cells first switch LREAL on, then back off on a repeat error.
    subdir = os.path.join(test_dir, "large_cell_real_optlay")
    os.chdir(subdir)
    shutil.copy("INCAR", "INCAR.orig")
    handler = VaspErrorHandler()
    handler.check()
    result = handler.correct()
    self.assertEqual(result["errors"], ['real_optlay'])
    self.assertEqual(VaspInput.from_directory(".")["INCAR"]["LREAL"], True)
    handler.check()
    result = handler.correct()
    self.assertEqual(result["errors"], ['real_optlay'])
    self.assertEqual(VaspInput.from_directory(".")["INCAR"]["LREAL"], False)
    shutil.copy("INCAR.orig", "INCAR")
    os.remove("INCAR.orig")
    os.remove("error.1.tar.gz")
    os.remove("error.2.tar.gz")
    os.chdir(test_dir)
from pymatgen.io.vasp.inputs import Incar, Poscar, VaspInput, Potcar, Kpoints
import os
import shutil
from custodian.vasp.jobs import VaspJob
from custodian.vasp.handlers import VaspErrorHandler, \
    UnconvergedErrorHandler, MeshSymmetryErrorHandler, \
    NonConvergingErrorHandler, PotimErrorHandler
from custodian.vasp.validators import VasprunXMLValidator
from custodian.custodian import Custodian

# Read the four standard VASP inputs from the working directory.
inc = Incar.from_file("INCAR")
pot = Potcar.from_file("POTCAR")
pos = Poscar.from_file("POSCAR")
kp = Kpoints.from_file("KPOINTS")

# The vdW kernel file must be present for van der Waals functionals.
shutil.copy2('/users/knc6/bin/vdw_kernel.bindat', './')
vinput = VaspInput.from_directory(".")

job = VaspJob(
    ['mpirun', '-np', '16',
     '/users/knc6/VASP/vasp54/src/vasp.5.4.1/bin/vasp_std'],
    final=False, backup=False)
handlers = [VaspErrorHandler(), MeshSymmetryErrorHandler(),
            UnconvergedErrorHandler(), NonConvergingErrorHandler(),
            PotimErrorHandler()]
validators = [VasprunXMLValidator()]

c = Custodian(handlers, [job], max_errors=5, validators=validators)
c.run()
def get_aneb_wf(
    structure,
    working_ion,
    insert_coords,
    insert_coords_combinations,
    n_images,
    vasp_input_set=None,
    override_default_vasp_params=None,
    handler_group=None,
    selective_dynamics_scheme="fix_two_atom",
    launch_mode="all",
    vasp_cmd=VASP_CMD,
    db_file=DB_FILE,
    wall_time=None,
    additional_fields=None,
    tags=None,
    powerup_dicts=None,
    name="ApproxNEB",
):
    """
    Workflow for the "ApproxNEB" algorithm: estimates energetic barriers for a
    working ion in a host structure between end points given by insert_coords
    and insert_coords_combinations. Intended for the dilute lattice limit only
    (one working ion in a large supercell, little volume change on insertion).

    Sets sensible VASP inputs and Custodian handler groups by default, and
    organizes outputs in an "approx_neb" collection; an "approx_neb"
    additional field is added to all generated task docs. Use
    additional_fields and tags to annotate everything this workflow produces.

    Args:
        structure (Structure): structure of empty host
        working_ion: specie of site to insert in structure (e.g. "Li").
        insert_coords (1x3 array or list of 1x3 arrays): fractional
            coordinates of site(s) to insert in structure.
        insert_coords_combinations (list of strings): strings of the form
            "i+j" referring to indices of insert_coords, selecting which end
            point pairs are interpolated (e.g. ["0+1", "0+2"]).
        n_images (int): number of interpolated images per path.
        vasp_input_set (VaspInputSet class): VASP input parameters;
            MPRelaxSet() + override_default_vasp_params are used if None.
        override_default_vasp_params (dict): passed to MPRelaxSet() in place
            of vasp_input_set (e.g. {"user_incar_settings": {"ISIF": 2}}).
        handler_group (str or [ErrorHandler]): Custodian handler group name
            or explicit list of handlers for RunVaspCustodian.
        selective_dynamics_scheme (str): "fix_two_atom"
        launch_mode (str): "all" or "screening"
        vasp_cmd (str): full VASP executable name.
        db_file (str): path to the database credentials file.
        wall_time (int): total walltime in seconds; if None on a PBS system
            the handler reads PBS_WALLTIME, otherwise it has no effect.
        additional_fields (dict): extra data stored in the approx_neb
            collection for record keeping.
        tags (list): strings stored under "tags" in the approx_neb collection.
        powerup_dicts (list): powerups applied to all dynamically created
            image fireworks.
        name (str): name for the returned workflow.

    Returns:
        Workflow
    """
    approx_neb_params = override_default_vasp_params or {
        "user_incar_settings": {
            "EDIFF": 0.0005,
            "EDIFFG": -0.05,
            "IBRION": 1,
            "ISIF": 3,
            "ISMEAR": 0,
            "LDAU": False,
            "NSW": 400,
            "ADDGRID": True,
            "ISYM": 1,
            "NELMIN": 4,
        }
    }

    handler_group = handler_group or [
        VaspErrorHandler(),
        MeshSymmetryErrorHandler(),
        NonConvergingErrorHandler(),
        PotimErrorHandler(),
        PositiveEnergyErrorHandler(),
        FrozenJobErrorHandler(),
        StdErrHandler(),
        WalltimeHandler(wall_time=wall_time),
    ]

    wf_uuid = str(uuid4())
    additional_fields = deepcopy(additional_fields)

    host_fw = HostFW(
        structure=structure,
        approx_neb_wf_uuid=wf_uuid,
        db_file=db_file,
        vasp_input_set=vasp_input_set,
        vasp_cmd=vasp_cmd,
        override_default_vasp_params=deepcopy(approx_neb_params),
        additional_fields=additional_fields,
        tags=tags,
    )

    # modifies incar settings needed for end point and image structure
    # relaxations
    if "user_incar_settings" not in approx_neb_params.keys():
        # NOTE(review): this replaces the whole params dict, discarding any
        # other override keys — preserved as-is; confirm this is intentional.
        approx_neb_params = {"user_incar_settings": {}}
    approx_neb_params["user_incar_settings"]["ISIF"] = 2
    approx_neb_params["user_incar_settings"]["ISYM"] = 0
    approx_neb_params["user_incar_settings"]["LDAU"] = False

    end_point_fws = []
    for index, coords in enumerate(insert_coords):
        end_point_fws.append(
            EndPointFW(
                approx_neb_wf_uuid=wf_uuid,
                insert_specie=working_ion,
                insert_coords=coords,
                end_points_index=index,
                db_file=db_file,
                override_default_vasp_params=approx_neb_params,
                parents=host_fw,
            )
        )

    evaluate_path_fws = []
    for end_points_combo in insert_coords_combinations:
        if isinstance(end_points_combo, str):
            combo = end_points_combo.split("+")
            if len(combo) == 2:
                c = [int(combo[0]), int(combo[-1])]
            else:
                raise ValueError(
                    "string format in insert_coords_combinations is incorrect")
        evaluate_path_fws.append(
            EvaluatePathFW(
                approx_neb_wf_uuid=wf_uuid,
                end_points_combo=end_points_combo,
                mobile_specie=working_ion,
                n_images=n_images,
                selective_dynamics_scheme=selective_dynamics_scheme,
                launch_mode=launch_mode,
                vasp_cmd=vasp_cmd,
                db_file=db_file,
                override_default_vasp_params=approx_neb_params,
                handler_group=handler_group,
                parents=[end_point_fws[c[0]], end_point_fws[c[1]]],
                add_additional_fields=additional_fields,
                add_tags=tags,
            )
        )

    wf = Workflow([host_fw] + end_point_fws + evaluate_path_fws)
    wf = use_custodian(wf, custodian_params={"handler_group": handler_group})

    if isinstance(tags, list):
        wf = add_tags(wf, tags)
    if isinstance(additional_fields, dict):
        wf = add_additional_fields_to_taskdocs(wf,
                                               update_dict=additional_fields)
    if powerup_dicts is not None:
        wf = powerup_by_kwargs(wf, powerup_dicts)
        for fw in wf.fws:
            fw.spec["vasp_powerups"] = powerup_dicts

    wf.metadata.update({"approx_neb_wf_uuid": wf_uuid})
    wf.name = name
    return wf
def launch_workflow(self, launchpad_dir="", k_product=50, job=None,
                    user_incar_settings=None, potcar_functional='PBE',
                    additional_handlers=None):
    """
    Creates a list of Fireworks. Each Firework represents calculations
    that will be done on a slab system of a compound in a specific
    orientation. Each Firework contains a oriented unit cell relaxation job
    and a WriteSlabVaspInputs which creates os. Firework(s) depending on
    whether or not Termination=True. Vasp outputs from all slab and
    oriented unit cell calculations will then be inserted into a database.

    Args:
        launchpad_dir (str path): The path to my_launchpad.yaml. Defaults to
            the current working directory containing your runs
        k_product: kpts[0][0]*a. Decide k density without kpoint0,
            default to 50
        job (VaspJob): The command (cmd) entered into VaspJob object.
            Default is specifically set for running vasp jobs on Carver at
            NERSC (use aprun for Hopper or Edison).
        user_incar_settings(dict): A dict specifying additional incar
            settings, default to None (ediff_per_atom=False)
        potcar_functional (str): default to PBE
        additional_handlers (list): extra Custodian handlers appended to the
            default set; default None (FIX: was a mutable default []).
    """
    # FIX: mutable default argument replaced with a None sentinel.
    if additional_handlers is None:
        additional_handlers = []

    launchpad = LaunchPad.from_file(
        os.path.join(os.environ["HOME"], launchpad_dir, "my_launchpad.yaml"))
    if self.reset:
        launchpad.reset('', require_password=False)

    # Scratch directory referred to by custodian.
    # May be different on non-Nersc systems.
    if not job:
        job = VaspJob(["mpirun", "-n", "64", "vasp"], auto_npar=False,
                      copy_magmom=True)

    handlers = [
        VaspErrorHandler(), NonConvergingErrorHandler(),
        UnconvergedErrorHandler(), PotimErrorHandler(),
        PositiveEnergyErrorHandler(), FrozenJobErrorHandler(timeout=3600)
    ]
    if additional_handlers:
        handlers.extend(additional_handlers)

    cust_params = {
        "custodian_params": {
            "scratch_dir": os.path.join("/global/scratch2/sd/",
                                        os.environ["USER"])
        },
        "jobs": job.double_relaxation_run(job.vasp_cmd, auto_npar=False),
        "handlers": handlers,
        "max_errors": 100
    }  # will return a list of jobs instead of just being one job

    fws = []
    for key in self.miller_dict.keys():
        # Enumerate through all compounds in the dictionary,
        # the key is the compositional formula of the compound
        # FIX: Python 2 print statements converted to print() calls.
        print(key)
        for miller_index in self.miller_dict[key]:
            # Enumerates through all miller indices we
            # want to create slabs of that compound from
            print(str(miller_index))

            max_norm = max(miller_index) if self.max_normal_search else None
            # Whether or not we want to use the
            # max_normal_search algorithm from surface.py
            print('true or false max norm is ', max_norm,
                  self.max_normal_search)

            slab = SlabGenerator(self.unit_cells_dict[key][0], miller_index,
                                 self.ssize, self.vsize,
                                 max_normal_search=max_norm)
            oriented_uc = slab.oriented_unit_cell

            if self.fail_safe and len(oriented_uc) > 199:
                break

            # This method only creates the oriented unit cell, the
            # slabs are created in the WriteSlabVaspInputs task.
            # WriteSlabVaspInputs will create the slabs from
            # the contcar of the oriented unit cell calculation
            # (FIX: removed unused local `handler = []`.)
            tasks = []
            folderbulk = '/%s_%s_k%s_s%sv%s_%s%s%s' % (
                oriented_uc.composition.reduced_formula, 'bulk', k_product,
                self.ssize, self.vsize, str(miller_index[0]),
                str(miller_index[1]), str(miller_index[2]))
            cwd = os.getcwd()
            if self.get_bulk_e:
                tasks.extend([
                    WriteUCVaspInputs(
                        oriented_ucell=oriented_uc,
                        folder=folderbulk,
                        cwd=cwd,
                        user_incar_settings=user_incar_settings,
                        potcar_functional=potcar_functional,
                        k_product=k_product),
                    RunCustodianTask(dir=folderbulk, cwd=cwd, **cust_params),
                    VaspSlabDBInsertTask(
                        struct_type="oriented_unit_cell",
                        loc=folderbulk, cwd=cwd,
                        miller_index=miller_index,
                        **self.vaspdbinsert_params)
                ])

                # Slab will inherit average final magnetic moment
                # of the bulk from outcar, will have to generalize
                # this for systems with different elements later
                # element = oriented_uc.species[0]
                # out = Outcar(cwd+folderbulk)
                # out_mag = out.magnetization
                # tot_mag = [mag['tot'] for mag in out_mag]
                # magmom = np.mean(tot_mag)
                # user_incar_settings['MAGMOM'] = {element: magmom}

            tasks.append(
                WriteSlabVaspInputs(
                    folder=folderbulk,
                    cwd=cwd,
                    user_incar_settings=user_incar_settings,
                    terminations=self.terminations,
                    custodian_params=cust_params,
                    vaspdbinsert_parameters=self.vaspdbinsert_params,
                    potcar_functional=potcar_functional,
                    k_product=k_product,
                    miller_index=miller_index,
                    min_slab_size=self.ssize,
                    min_vacuum_size=self.vsize,
                    ucell=self.unit_cells_dict[key][0]))

            fw = Firework(tasks, name=folderbulk)
            fws.append(fw)

    wf = Workflow(fws, name='Surface Calculations')
    launchpad.add_wf(wf)
# Load the project description; this will be the only CLI input.
# FIX: use context managers (the file handles were leaked) and yaml.safe_load
# (bare yaml.load without a Loader is deprecated and unsafe on untrusted
# input; switch back to yaml.load(f, Loader=yaml.FullLoader) if the config
# relies on python-specific YAML tags).
with open('my_project.yaml') as project_file:
    my_project = yaml.safe_load(project_file)

NAME = my_project['NAME']
INCAR_GENERAL = my_project['Incar_General']
with open(my_project['Potcar_Spec']) as potcar_spec_file:
    POTCAR_SPEC = yaml.safe_load(potcar_spec_file)
MATERIALS_LIST = my_project['Insilico_Fab']['Material_List']

# Structures come either from a local StructsDir or from the Materials Project.
struct_list = [Poscar.from_file(poscar) for poscar in glob('StructsDir/POSCAR*') \
               if 'StructsDir' in MATERIALS_LIST] + \
              [Poscar(get_struct_from_mp(p)) for p in MATERIALS_LIST \
               if 'StructsDir' not in p]

WORKFLOWS = my_project['Workflow']
project_log = get_logger(NAME + "_InSilico_Materials")

# general structure creation manipulation module , for example make slabs
# make 2D material, or use a different source like GASP as well
# insilico_fab(creator = SYNTH['creator'], script=SYNTH['script'])

error_handler = [VaspErrorHandler()]
steps = my_project['Workflow']['Steps'].keys()

# testing [Relax]
#print (steps)
#print (struct_list)
#print ('Reached past steps')
Relax()
#launch_daemon([Relax], interval=30,handlers=error_handler, ld_logger=project_log)
def test_brions(self):
    """brions alternates IBRION and adjusts POTIM over repeated fixes."""
    shutil.copy("INCAR.ibrion", "INCAR")
    handler = VaspErrorHandler("vasp.brions")

    handler.check()
    result = handler.correct()
    self.assertEqual(result["errors"], ["brions"])
    incar = Incar.from_file("INCAR")
    self.assertEqual(incar["IBRION"], 1)
    self.assertAlmostEqual(incar["POTIM"], 1.5)

    handler.check()
    result = handler.correct()
    self.assertEqual(result["errors"], ["brions"])
    incar = Incar.from_file("INCAR")
    self.assertEqual(incar["IBRION"], 2)
    self.assertAlmostEqual(incar["POTIM"], 1.4)

    handler.check()
    result = handler.correct()
    self.assertEqual(result["errors"], ["brions"])
    incar = Incar.from_file("INCAR")
    self.assertEqual(incar["IBRION"], 2)
    self.assertAlmostEqual(incar["POTIM"], 1.5)
def test_zbrent(self):
    """zbrent corrections: default, VTST-enabled, and tight-EDIFF paths."""
    handler = VaspErrorHandler("vasp.zbrent")
    handler.check()
    result = handler.correct()
    self.assertEqual(result["errors"], ["zbrent"])
    incar = Incar.from_file("INCAR")
    self.assertEqual(incar["IBRION"], 1)

    handler.check()
    result = handler.correct()
    self.assertEqual(result["errors"], ["zbrent"])
    incar = Incar.from_file("INCAR")
    self.assertEqual(incar["EDIFF"], 1e-6)
    self.assertEqual(incar["NELMIN"], 6)

    # With VTST fixes enabled, the second pass switches the optimizer.
    shutil.copy("INCAR.orig", "INCAR")
    handler = VaspErrorHandler("vasp.zbrent")
    handler.vtst_fixes = True
    handler.check()
    result = handler.correct()
    self.assertEqual(result["errors"], ["zbrent"])
    incar = Incar.from_file("INCAR")
    self.assertEqual(incar["IBRION"], 1)
    self.assertEqual(incar["EDIFF"], 0.0004)

    handler.check()
    result = handler.correct()
    self.assertEqual(result["errors"], ["zbrent"])
    incar = Incar.from_file("INCAR")
    self.assertEqual(incar["EDIFF"], 1e-6)
    self.assertEqual(incar["NELMIN"], 6)
    self.assertEqual(incar["IBRION"], 3)
    self.assertEqual(incar["IOPT"], 7)
    self.assertEqual(incar["POTIM"], 0)

    # Starting from an already-tight EDIFF, it is tightened further.
    shutil.copy("INCAR.ediff", "INCAR")
    handler = VaspErrorHandler("vasp.zbrent")
    handler.check()
    result = handler.correct()
    self.assertEqual(result["errors"], ["zbrent"])
    incar = Incar.from_file("INCAR")
    self.assertEqual(incar["IBRION"], 1)
    self.assertEqual(incar["EDIFF"], 1e-6)

    handler.check()
    result = handler.correct()
    self.assertEqual(result["errors"], ["zbrent"])
    incar = Incar.from_file("INCAR")
    self.assertEqual(incar["EDIFF"], 1e-7)
    self.assertEqual(incar["NELMIN"], 6)
def test_posmap(self):
    """posmap should tighten SYMPREC to 1e-6."""
    handler = VaspErrorHandler("vasp.posmap")
    self.assertEqual(handler.check(), True)
    self.assertEqual(handler.correct()["errors"], ["posmap"])
    self.assertEqual(Incar.from_file("INCAR")["SYMPREC"], 1e-6)
def test_brmix(self):
    """Drive brmix through all three corrections, then the NELECT no-op."""
    handler = VaspErrorHandler("vasp.brmix")
    self.assertEqual(handler.check(), True)

    # The first (no good OUTCAR) correction, check IMIX
    result = handler.correct()
    self.assertEqual(result["errors"], ["brmix"])
    vasp_input = VaspInput.from_directory(".")
    self.assertEqual(vasp_input["INCAR"]["IMIX"], 1)
    self.assertTrue(os.path.exists("CHGCAR"))

    # The next correction check Gamma and evenize
    handler.correct()
    vasp_input = VaspInput.from_directory(".")
    self.assertFalse("IMIX" in vasp_input["INCAR"])
    self.assertTrue(os.path.exists("CHGCAR"))
    kpts = vasp_input["KPOINTS"]
    if kpts.style == Kpoints.supported_modes.Gamma and kpts.num_kpts < 1:
        self.assertFalse(all(n % 2 == 0 for n in kpts.kpts[0]))

    # The next correction check ISYM and no CHGCAR
    handler.correct()
    vasp_input = VaspInput.from_directory(".")
    self.assertEqual(vasp_input["INCAR"]["ISYM"], 0)
    self.assertFalse(os.path.exists("CHGCAR"))

    shutil.copy("INCAR.nelect", "INCAR")
    handler = VaspErrorHandler("vasp.brmix")
    self.assertEqual(handler.check(), False)
    result = handler.correct()
    self.assertEqual(result["errors"], [])
def test_check_correct(self):
    """tet (with SIGMA), errors-subset filter, rot_matrix, real_optlay."""
    handler = VaspErrorHandler("vasp.teterror")
    handler.check()
    result = handler.correct()
    self.assertEqual(result["errors"], ["tet"])
    self.assertEqual(
        result["actions"],
        [{"action": {"_set": {"ISMEAR": 0, "SIGMA": 0.05}},
          "dict": "INCAR"}],
    )

    # A handler restricted to other errors must not flag this output.
    handler = VaspErrorHandler("vasp.teterror",
                               errors_subset_to_catch=["eddrmm"])
    self.assertFalse(handler.check())

    handler = VaspErrorHandler("vasp.sgrcon")
    handler.check()
    result = handler.correct()
    self.assertEqual(result["errors"], ["rot_matrix"])
    self.assertEqual({action["dict"] for action in result["actions"]},
                     {"KPOINTS"})

    handler = VaspErrorHandler("vasp.real_optlay")
    handler.check()
    result = handler.correct()
    self.assertEqual(result["errors"], ["real_optlay"])
    self.assertEqual(
        result["actions"],
        [{"action": {"_set": {"LREAL": False}}, "dict": "INCAR"}],
    )

    # Large cells first enable LREAL, then disable it on the repeat error.
    subdir = os.path.join(test_dir, "large_cell_real_optlay")
    os.chdir(subdir)
    shutil.copy("INCAR", "INCAR.orig")
    handler = VaspErrorHandler()
    handler.check()
    result = handler.correct()
    self.assertEqual(result["errors"], ["real_optlay"])
    self.assertEqual(VaspInput.from_directory(".")["INCAR"]["LREAL"], True)
    handler.check()
    result = handler.correct()
    self.assertEqual(result["errors"], ["real_optlay"])
    self.assertEqual(VaspInput.from_directory(".")["INCAR"]["LREAL"], False)
    shutil.copy("INCAR.orig", "INCAR")
    os.remove("INCAR.orig")
    os.remove("error.1.tar.gz")
    os.remove("error.2.tar.gz")
    os.chdir(test_dir)
def __init__(self, output_file="vasp.out"):
    """Delegate to VaspErrorHandler, fixing only the default output file name.

    Args:
        output_file (str): Name of the VASP stdout capture that the parent
            handler scans for error messages. Defaults to "vasp.out".
    """
    VaspErrorHandler.__init__(self, output_file)
def run_task(self, fw_spec):
    """Assemble a custodian run (jobs, handlers, validators) and execute VASP.

    The handler set is picked by the "handler_group" key ("default",
    "strict", "md" or "no_handler"); the job list by "job_type"
    ("normal", "double_relaxation_run" or "full_opt_run").
    """
    handler_groups = {
        "default": [VaspErrorHandler(), MeshSymmetryErrorHandler(),
                    UnconvergedErrorHandler(), NonConvergingErrorHandler(),
                    PotimErrorHandler(), PositiveEnergyErrorHandler(),
                    FrozenJobErrorHandler()],
        "strict": [VaspErrorHandler(), MeshSymmetryErrorHandler(),
                   UnconvergedErrorHandler(), NonConvergingErrorHandler(),
                   PotimErrorHandler(), PositiveEnergyErrorHandler(),
                   FrozenJobErrorHandler(), AliasingErrorHandler()],
        "md": [VaspErrorHandler(), NonConvergingErrorHandler()],
        "no_handler": [],
    }

    # A plain string command may contain env vars; expand then tokenize it.
    vasp_cmd = env_chk(self["vasp_cmd"], fw_spec)
    if isinstance(vasp_cmd, six.string_types):
        vasp_cmd = shlex.split(os.path.expandvars(vasp_cmd))

    # Pull the remaining knobs out of the task spec / worker env.
    job_type = self.get("job_type", "normal")
    scratch_dir = env_chk(self.get("scratch_dir"), fw_spec)
    gzip_output = self.get("gzip_output", True)
    max_errors = self.get("max_errors", 5)
    auto_npar = env_chk(self.get("auto_npar"), fw_spec, strict=False,
                        default=False)
    gamma_vasp_cmd = env_chk(self.get("gamma_vasp_cmd"), fw_spec,
                             strict=False, default=None)
    if gamma_vasp_cmd:
        gamma_vasp_cmd = shlex.split(gamma_vasp_cmd)

    # Build the job list for the requested run style.
    if job_type == "normal":
        jobs = [VaspJob(vasp_cmd, auto_npar=auto_npar,
                        gamma_vasp_cmd=gamma_vasp_cmd)]
    elif job_type == "double_relaxation_run":
        jobs = VaspJob.double_relaxation_run(
            vasp_cmd, auto_npar=auto_npar, ediffg=self.get("ediffg"),
            half_kpts_first_relax=False)
    elif job_type == "full_opt_run":
        jobs = VaspJob.full_opt_run(
            vasp_cmd, auto_npar=auto_npar, ediffg=self.get("ediffg"),
            max_steps=5, half_kpts_first_relax=False)
    else:
        raise ValueError("Unsupported job type: {}".format(job_type))

    # Optional extra handlers on top of the selected group.
    handlers = handler_groups[self.get("handler_group", "default")]
    if self.get("max_force_threshold"):
        handlers.append(MaxForceErrorHandler(
            max_force_threshold=self["max_force_threshold"]))
    if self.get("wall_time"):
        handlers.append(WalltimeHandler(wall_time=self["wall_time"]))

    Custodian(handlers, jobs, validators=[VasprunXMLValidator()],
              max_errors=max_errors, scratch_dir=scratch_dir,
              gzipped_output=gzip_output).run()
def run_task(self, fw_spec):
    """Build a custodian run (jobs, handlers, validators) and execute it.

    Job construction is driven by "job_type" ("normal",
    "double_relaxation_run", "metagga_opt_run", "full_opt_run" or "neb");
    handlers come either from a named group or from an explicit handler
    list supplied under "handler_group".

    Returns:
        FWAction storing the parsed custodian.json, if one was written.
    """
    handler_groups = {
        "default": [VaspErrorHandler(), MeshSymmetryErrorHandler(),
                    UnconvergedErrorHandler(), NonConvergingErrorHandler(),
                    PotimErrorHandler(), PositiveEnergyErrorHandler(),
                    FrozenJobErrorHandler(), StdErrHandler(),
                    DriftErrorHandler()],
        "strict": [VaspErrorHandler(), MeshSymmetryErrorHandler(),
                   UnconvergedErrorHandler(), NonConvergingErrorHandler(),
                   PotimErrorHandler(), PositiveEnergyErrorHandler(),
                   FrozenJobErrorHandler(), StdErrHandler(),
                   AliasingErrorHandler(), DriftErrorHandler()],
        "md": [VaspErrorHandler(), NonConvergingErrorHandler()],
        "no_handler": [],
    }

    # A plain string command may contain env vars; expand then tokenize it.
    vasp_cmd = env_chk(self["vasp_cmd"], fw_spec)
    if isinstance(vasp_cmd, str):
        vasp_cmd = shlex.split(os.path.expandvars(vasp_cmd))

    # Remaining knobs from the task spec / worker env.
    job_type = self.get("job_type", "normal")
    scratch_dir = env_chk(self.get("scratch_dir"), fw_spec)
    gzip_output = self.get("gzip_output", True)
    max_errors = self.get("max_errors", CUSTODIAN_MAX_ERRORS)
    auto_npar = env_chk(self.get("auto_npar"), fw_spec, strict=False,
                        default=False)
    gamma_vasp_cmd = env_chk(self.get("gamma_vasp_cmd"), fw_spec,
                             strict=False, default=None)
    if gamma_vasp_cmd:
        gamma_vasp_cmd = shlex.split(gamma_vasp_cmd)
    half_kpts = self.get("half_kpts_first_relax", HALF_KPOINTS_FIRST_RELAX)

    # Build the job list for the requested run style.
    if job_type == "normal":
        jobs = [VaspJob(vasp_cmd, auto_npar=auto_npar,
                        gamma_vasp_cmd=gamma_vasp_cmd)]
    elif job_type == "double_relaxation_run":
        jobs = VaspJob.double_relaxation_run(
            vasp_cmd, auto_npar=auto_npar, ediffg=self.get("ediffg"),
            half_kpts_first_relax=half_kpts)
    elif job_type == "metagga_opt_run":
        jobs = VaspJob.metagga_opt_run(
            vasp_cmd, auto_npar=auto_npar, ediffg=self.get("ediffg"),
            half_kpts_first_relax=half_kpts)
    elif job_type == "full_opt_run":
        jobs = VaspJob.full_opt_run(
            vasp_cmd, auto_npar=auto_npar, ediffg=self.get("ediffg"),
            max_steps=9, half_kpts_first_relax=half_kpts)
    elif job_type == "neb":
        # TODO: @shyuep @HanmeiTang NEB currently only works (i) in
        # reservation mode, (ii) with an overridden queueadapter parameter
        # and (iii) a queue adapter exposing "nnodes" under that exact
        # name. The node count should arguably be an ordinary user-set
        # parameter (e.g. fw_spec["neb_nnodes"]) instead. -computron
        nnodes = int(fw_spec["_queueadapter"]["nnodes"])

        def _scale_np(cmd):
            # TODO: @shyuep @HanmeiTang This hack rewrites the value after
            # the "-n"/"-np" flag (processes per node) to nnodes * ppn. It
            # assumes a tokenized command, contrary to the task's string
            # convention, and would be better replaced by a separate
            # NEB_VASP_CMD env variable. -computron
            flag_positions = [i for i, tok in enumerate(cmd) if '-n' in tok]
            ppn = int(cmd[flag_positions[0] + 1])
            cmd[flag_positions[0] + 1] = str(nnodes * ppn)

        _scale_np(vasp_cmd)
        if gamma_vasp_cmd:
            _scale_np(gamma_vasp_cmd)

        jobs = [VaspNEBJob(vasp_cmd, final=False, auto_npar=auto_npar,
                           gamma_vasp_cmd=gamma_vasp_cmd)]
    else:
        raise ValueError("Unsupported job type: {}".format(job_type))

    # Resolve handlers: a string names a predefined group, anything else
    # is taken to be an explicit handler list.
    handler_group = self.get("handler_group", "default")
    if isinstance(handler_group, str):
        handlers = handler_groups[handler_group]
    else:
        handlers = handler_group
    if self.get("max_force_threshold"):
        handlers.append(MaxForceErrorHandler(
            max_force_threshold=self["max_force_threshold"]))
    if self.get("wall_time"):
        handlers.append(WalltimeHandler(wall_time=self["wall_time"]))

    if job_type == "neb":
        # CI-NEB vasprun.xml files are sometimes incomplete and the file
        # structure differs, so no validators are applied.
        validators = []
    else:
        validators = [VasprunXMLValidator(), VaspFilesValidator()]

    Custodian(handlers, jobs, validators=validators, max_errors=max_errors,
              scratch_dir=scratch_dir, gzipped_output=gzip_output).run()

    if os.path.exists(zpath("custodian.json")):
        return FWAction(stored_data={
            "custodian": loadfn(zpath("custodian.json"))})
def test_check_correct(self):
    """Verify detection and correction of tet, rot_matrix and real_optlay."""
    handler = VaspErrorHandler("vasp.teterror")
    handler.check()
    result = handler.correct()
    self.assertEqual(result["errors"], ['tet'])
    self.assertEqual(result["actions"],
                     [{'action': {'_set': {'ISMEAR': 0}},
                       'dict': 'INCAR'}])

    # Restricting the caught subset must make the tet error invisible.
    handler = VaspErrorHandler("vasp.teterror", errors_subset_to_catch=["eddrmm"])
    self.assertFalse(handler.check())

    handler = VaspErrorHandler("vasp.sgrcon")
    handler.check()
    result = handler.correct()
    self.assertEqual(result["errors"], ['rot_matrix'])
    self.assertEqual({a["dict"] for a in result["actions"]}, {"KPOINTS"})

    handler = VaspErrorHandler("vasp.real_optlay")
    handler.check()
    result = handler.correct()
    self.assertEqual(result["errors"], ['real_optlay'])
    self.assertEqual(result["actions"],
                     [{'action': {'_set': {'LREAL': False}},
                       'dict': 'INCAR'}])

    # Large cell: LREAL stays True on the first fix, then flips to False.
    os.chdir(os.path.join(test_dir, "large_cell_real_optlay"))
    shutil.copy("INCAR", "INCAR.orig")
    handler = VaspErrorHandler()
    handler.check()
    self.assertEqual(handler.correct()["errors"], ['real_optlay'])
    self.assertEqual(VaspInput.from_directory(".")["INCAR"]["LREAL"], True)
    handler.check()
    self.assertEqual(handler.correct()["errors"], ['real_optlay'])
    self.assertEqual(VaspInput.from_directory(".")["INCAR"]["LREAL"], False)

    # Restore the fixture directory to its original state.
    shutil.copy("INCAR.orig", "INCAR")
    os.remove("INCAR.orig")
    os.remove("error.1.tar.gz")
    os.remove("error.2.tar.gz")
    os.chdir(test_dir)
except: launch_dir = launch_dir.replace('garden/dev', 'garden') try: os.chdir(launch_dir) except: print ' |===> could not find launch directory in usual locations' lpdb.rerun_fw(fw['fw_id']) print ' |===> marked for RERUN' counter['LOCATION_NOT_FOUND'] += 1 continue print ' |===>', launch_dir vaspout = os.path.join(launch_dir, "vasp.out") if not os.path.exists(vaspout): vaspout = os.path.join(launch_dir, "vasp.out.gz") try: h = VaspErrorHandler(vaspout) h.check() d = h.correct() except: counter['GGA_static_Handler_Error'] += 1 if d['errors']: for err in d['errors']: counter['GGA_static_' + err] += 1 if 'brmix' in d['errors']: #lpdb.rerun_fw(fw['fw_id']) print ' |===> BRMIX error -> marked for RERUN with alternative strategy' else: print ' |===> no vasp error indicated -> TODO' counter['GGA_STATIC_NO_VASP_ERROR'] += 1 os.chdir(cwd) else: