def test_setup(self):
    """Run VaspNEBJob.setup() against the setup_neb fixture, verify the
    INCAR/KPOINTS it writes, then restore the fixture to its pristine state."""
    # setup() needs a pseudopotential directory; fall back to the test
    # data directory when the environment does not provide one.
    os.environ.setdefault("VASP_PSP_DIR", test_dir)
    os.chdir(os.path.join(test_dir, "setup_neb"))

    job = VaspNEBJob("hello", half_kpts=True)
    job.setup()

    # On multi-core machines setup() is expected to enable parallelism.
    incar = Incar.from_file("INCAR")
    if multiprocessing.cpu_count() > 1:
        self.assertGreater(incar["NPAR"], 1)

    # half_kpts=True rewrites the mesh as Gamma-centred; the pristine
    # mesh is preserved under the .orig suffix.
    kpt_pre = Kpoints.from_file("KPOINTS.orig")
    kpt = Kpoints.from_file("KPOINTS")
    self.assertEqual(kpt_pre.style.name, "Monkhorst")
    self.assertEqual(kpt.style.name, "Gamma")

    # Undo everything setup() changed so the fixture stays clean.
    shutil.copy("KPOINTS.orig", "KPOINTS")
    for backup in ("INCAR.orig", "KPOINTS.orig", "POTCAR.orig"):
        os.remove(backup)
    for image_backup in glob.glob("[0-9][0-9]/POSCAR.orig"):
        os.remove(image_backup)
def test_postprocess(self):
    """Check that VaspNEBJob.postprocess() copies both the top-level and
    the per-image outputs with the given suffix, then delete the copies."""
    top_level = ['INCAR', 'KPOINTS', 'POTCAR', 'vasprun.xml']
    per_image = [
        'CHG', 'CHGCAR', 'CONTCAR', 'DOSCAR', 'EIGENVAL', 'IBZKPT',
        'PCDAT', 'POSCAR', 'REPORT', 'PROCAR', 'OSZICAR', 'OUTCAR',
        'WAVECAR', 'XDATCAR',
    ]

    os.chdir(os.path.join(test_dir, 'postprocess_neb'))
    root = os.path.abspath(".")

    job = VaspNEBJob("hello", final=False, suffix=".test")
    job.postprocess()

    for name in top_level:
        suffixed = '{}.test'.format(name)
        self.assertTrue(os.path.isfile(suffixed))
        os.remove(suffixed)

    # Each NEB image lives in a two-digit subdirectory (00, 01, ...).
    for image_dir in glob.glob("[0-9][0-9]"):
        os.chdir(os.path.join(root, image_dir))
        for name in per_image:
            if os.path.exists(name):
                suffixed = '{}.test'.format(name)
                self.assertTrue(os.path.isfile(suffixed))
                os.remove(suffixed)
def test_setup(self):
    """Exercise VaspNEBJob.setup() inside a scratch copy of the setup_neb
    fixture, so the test data itself is never modified."""
    with cd(os.path.join(test_dir, 'setup_neb')):
        with ScratchDir('.', copy_from_current_on_enter=True):
            job = VaspNEBJob("hello", half_kpts=True)
            job.setup()

            # The NPAR check is only meaningful with enough cores.
            incar = Incar.from_file("INCAR")
            if multiprocessing.cpu_count() > 3:
                self.assertGreater(incar["NPAR"], 1)

            # half_kpts=True converts the mesh to Gamma-centred; the
            # original Monkhorst mesh is kept as KPOINTS.orig.
            kpt_pre = Kpoints.from_file("KPOINTS.orig")
            kpt = Kpoints.from_file("KPOINTS")
            self.assertEqual(kpt_pre.style.name, "Monkhorst")
            self.assertEqual(kpt.style.name, "Gamma")
def test_postprocess(self):
    """Verify postprocess() leaves .test-suffixed copies of every NEB
    output, both at the top level and inside each image directory."""
    top_level = ["INCAR", "KPOINTS", "POTCAR", "vasprun.xml"]
    per_image = [
        "CHG", "CHGCAR", "CONTCAR", "DOSCAR", "EIGENVAL", "IBZKPT",
        "PCDAT", "POSCAR", "REPORT", "PROCAR", "OSZICAR", "OUTCAR",
        "WAVECAR", "XDATCAR",
    ]
    with cd(os.path.join(test_dir, "postprocess_neb")):
        root = os.path.abspath(".")

        job = VaspNEBJob("hello", final=False, suffix=".test")
        job.postprocess()

        for name in top_level:
            suffixed = f"{name}.test"
            self.assertTrue(os.path.isfile(suffixed))
            os.remove(suffixed)

        # NEB images live in two-digit subdirectories (00, 01, ...).
        for image_dir in glob.glob("[0-9][0-9]"):
            os.chdir(os.path.join(root, image_dir))
            for name in per_image:
                if os.path.exists(name):
                    suffixed = f"{name}.test"
                    self.assertTrue(os.path.isfile(suffixed))
                    os.remove(suffixed)
def test_postprocess(self):
    """Run postprocess() in the postprocess_neb fixture and assert that
    every expected output gained a .test-suffixed copy; remove the copies."""
    expected_top = ['INCAR', 'KPOINTS', 'POTCAR', 'vasprun.xml']
    expected_sub = ['CHG', 'CHGCAR', 'CONTCAR', 'DOSCAR', 'EIGENVAL',
                    'IBZKPT', 'PCDAT', 'POSCAR', 'REPORT', 'PROCAR',
                    'OSZICAR', 'OUTCAR', 'WAVECAR', 'XDATCAR']

    with cd(os.path.join(test_dir, 'postprocess_neb')):
        base = os.path.abspath(".")

        job = VaspNEBJob("hello", final=False, suffix=".test")
        job.postprocess()

        for fname in expected_top:
            copy_name = '{}.test'.format(fname)
            self.assertTrue(os.path.isfile(copy_name))
            os.remove(copy_name)

        # Walk every two-digit image directory and check its outputs too.
        for subdir in glob.glob("[0-9][0-9]"):
            os.chdir(os.path.join(base, subdir))
            for fname in expected_sub:
                if os.path.exists(fname):
                    copy_name = '{}.test'.format(fname)
                    self.assertTrue(os.path.isfile(copy_name))
                    os.remove(copy_name)
def test_to_from_dict(self):
    """Round-trip a VaspNEBJob through as_dict()/from_dict()."""
    original = VaspNEBJob("hello")
    restored = VaspNEBJob.from_dict(original.as_dict())
    # The reconstructed job must be the same class and keep its command.
    self.assertEqual(type(restored), type(original))
    self.assertEqual(restored.vasp_cmd, "hello")
def run_task(self, fw_spec):
    """Run VASP under custodian supervision.

    Builds a list of custodian Jobs from the ``job_type`` option
    ("normal", "double_relaxation_run", "metagga_opt_run",
    "full_opt_run", or "neb"), selects an error-handler group, and
    executes everything through ``Custodian``.  If custodian wrote a
    ``custodian.json`` record, it is returned as stored data in an
    ``FWAction``; otherwise the method returns None.

    Args:
        fw_spec: FireWorks spec dict; used for env_chk lookups and, for
            NEB jobs, must contain ``_queueadapter["nnodes"]``.
    """
    # Named groups of error handlers selectable via "handler_group".
    handler_groups = {
        "default": [VaspErrorHandler(), MeshSymmetryErrorHandler(),
                    UnconvergedErrorHandler(), NonConvergingErrorHandler(),
                    PotimErrorHandler(), PositiveEnergyErrorHandler(),
                    FrozenJobErrorHandler(), StdErrHandler(),
                    DriftErrorHandler()],
        "strict": [VaspErrorHandler(), MeshSymmetryErrorHandler(),
                   UnconvergedErrorHandler(), NonConvergingErrorHandler(),
                   PotimErrorHandler(), PositiveEnergyErrorHandler(),
                   FrozenJobErrorHandler(), StdErrHandler(),
                   AliasingErrorHandler(), DriftErrorHandler()],
        "md": [VaspErrorHandler(), NonConvergingErrorHandler()],
        "no_handler": []
    }

    # Resolve the VASP command; a string is expanded ($VARS) and split
    # into an argv list, so downstream code always sees a list.
    vasp_cmd = env_chk(self["vasp_cmd"], fw_spec)
    if isinstance(vasp_cmd, str):
        vasp_cmd = os.path.expandvars(vasp_cmd)
        vasp_cmd = shlex.split(vasp_cmd)

    # initialize variables
    job_type = self.get("job_type", "normal")
    scratch_dir = env_chk(self.get("scratch_dir"), fw_spec)
    gzip_output = self.get("gzip_output", True)
    max_errors = self.get("max_errors", CUSTODIAN_MAX_ERRORS)
    auto_npar = env_chk(self.get("auto_npar"), fw_spec,
                        strict=False, default=False)
    # NOTE(review): unlike vasp_cmd above, this is split unconditionally —
    # presumably gamma_vasp_cmd is always configured as a string; confirm.
    gamma_vasp_cmd = env_chk(self.get("gamma_vasp_cmd"), fw_spec,
                             strict=False, default=None)
    if gamma_vasp_cmd:
        gamma_vasp_cmd = shlex.split(gamma_vasp_cmd)

    # construct jobs
    if job_type == "normal":
        jobs = [VaspJob(vasp_cmd, auto_npar=auto_npar,
                        gamma_vasp_cmd=gamma_vasp_cmd)]
    elif job_type == "double_relaxation_run":
        jobs = VaspJob.double_relaxation_run(
            vasp_cmd,
            auto_npar=auto_npar,
            ediffg=self.get("ediffg"),
            half_kpts_first_relax=self.get("half_kpts_first_relax",
                                           HALF_KPOINTS_FIRST_RELAX))
    elif job_type == "metagga_opt_run":
        jobs = VaspJob.metagga_opt_run(
            vasp_cmd,
            auto_npar=auto_npar,
            ediffg=self.get("ediffg"),
            half_kpts_first_relax=self.get("half_kpts_first_relax",
                                           HALF_KPOINTS_FIRST_RELAX))
    elif job_type == "full_opt_run":
        jobs = VaspJob.full_opt_run(
            vasp_cmd,
            auto_npar=auto_npar,
            ediffg=self.get("ediffg"),
            max_steps=9,  # cap on successive relaxation steps
            half_kpts_first_relax=self.get("half_kpts_first_relax",
                                           HALF_KPOINTS_FIRST_RELAX))
    elif job_type == "neb":
        # TODO: @shyuep @HanmeiTang This means that NEB can only be run (i) in reservation mode
        # and (ii) when the queueadapter parameter is overridden and (iii) the queue adapter
        # has a convention for nnodes (with that name). Can't the number of nodes be made a
        # parameter that the user sets differently? e.g., fw_spec["neb_nnodes"] must be set
        # when setting job_type=NEB? Then someone can use this feature in non-reservation
        # mode and without this complication. -computron
        nnodes = int(fw_spec["_queueadapter"]["nnodes"])

        # TODO: @shyuep @HanmeiTang - I am not sure what the code below is doing. It looks like
        # it is trying to override the number of processors. But I tried running the code
        # below after setting "vasp_cmd = 'mpirun -n 16 vasp'" and the code fails.
        # (i) Is this expecting an array vasp_cmd rather than String? If so, that's opposite to
        # the rest of this task's convention and documentation
        # (ii) can we get rid of this hacking in the first place? e.g., allowing the user to
        # separately set the NEB_VASP_CMD as an env_variable and not rewriting the command
        # inside this.
        # -computron

        # Index the tag "-n" or "-np"
        # NOTE(review): substring match, so any token containing "-n"
        # (e.g. "--name") would also be picked up — verify inputs.
        index = [i for i, s in enumerate(vasp_cmd) if '-n' in s]
        ppn = int(vasp_cmd[index[0] + 1])
        # Scale processes-per-node by the reserved node count, in place.
        vasp_cmd[index[0] + 1] = str(nnodes * ppn)

        # Do the same for gamma_vasp_cmd
        if gamma_vasp_cmd:
            index = [i for i, s in enumerate(gamma_vasp_cmd) if '-n' in s]
            ppn = int(gamma_vasp_cmd[index[0] + 1])
            gamma_vasp_cmd[index[0] + 1] = str(nnodes * ppn)

        jobs = [VaspNEBJob(vasp_cmd, final=False, auto_npar=auto_npar,
                           gamma_vasp_cmd=gamma_vasp_cmd)]
    else:
        raise ValueError("Unsupported job type: {}".format(job_type))

    # construct handlers: either a named group or an explicit list
    # supplied by the caller.
    handler_group = self.get("handler_group", "default")
    if isinstance(handler_group, str):
        handlers = handler_groups[handler_group]
    else:
        handlers = handler_group

    if self.get("max_force_threshold"):
        handlers.append(
            MaxForceErrorHandler(
                max_force_threshold=self["max_force_threshold"]))

    if self.get("wall_time"):
        handlers.append(WalltimeHandler(wall_time=self["wall_time"]))

    if job_type == "neb":
        validators = [
        ]  # CINEB vasprun.xml sometimes incomplete, file structure different
    else:
        validators = [VasprunXMLValidator(), VaspFilesValidator()]

    c = Custodian(handlers, jobs, validators=validators,
                  max_errors=max_errors, scratch_dir=scratch_dir,
                  gzipped_output=gzip_output)

    c.run()

    # Surface custodian's run record (possibly gzipped; zpath resolves
    # the actual filename) to the workflow as stored data.
    if os.path.exists(zpath("custodian.json")):
        stored_custodian_data = {
            "custodian": loadfn(zpath("custodian.json"))
        }
        return FWAction(stored_data=stored_custodian_data)