def run_vasp(override=None, suffix=''):
    """
    Execute vasp with given override and suffix.

    :param override: list of settings_override dicts passed to the job;
        default None means no overrides. (A mutable default list was
        replaced with None to avoid state shared across calls.)
    :param suffix: suffix appended to output files by the job
    :return: None (runs the job for its side effects)
    """
    from Classes_Pymatgen import Incar
    from Classes_Custodian import StandardJob
    from custodian.custodian import Custodian
    import os

    if override is None:
        override = []

    # Determine whether to use Gamma optimized vasp
    incar = Incar.from_file('INCAR')
    if 'AUTO_GAMMA' in incar and incar['AUTO_GAMMA']:
        vasp = os.environ['VASP_GAMMA']
    else:
        vasp = os.environ['VASP_KPTS']

    handlers = []
    vaspjob = [StandardJob(['mpirun', '-np', os.environ['VASP_PROCS'], vasp],
                           'vasp.log', auto_npar=False, backup=False,
                           settings_override=override, suffix=suffix,
                           final=False)]
    c = Custodian(handlers, vaspjob, max_errors=10)
    c.run()
def run(self, job_cmd=None):
    """
    run the vasp jobs through custodian
    if the job list is empty,
    run a single job with the initial input set

    :param job_cmd: optional command overriding each job's own job_cmd;
        when None, every job gets self.job_cmd.
    """
    # Apply the (possibly overridden) command to every job before running.
    for j in self.jobs:
        if job_cmd is not None:
            j.job_cmd = job_cmd
        else:
            j.job_cmd = self.job_cmd
    # NOTE(review): c_params is built but never used below — presumably
    # leftover from a serialization path; confirm before removing.
    c_params = {'jobs': [j.as_dict() for j in self.jobs],
                'handlers': [h.as_dict() for h in self.handlers],
                'max_errors': 5}
    c = Custodian(self.handlers, self.jobs, max_errors=5)
    c.run()
    # Append a log entry per job; final_energy is filled in later elsewhere.
    for j in self.jobs:
        self.cal_log.append({"job": j.as_dict(),
                             'job_id': j.job_id,
                             "corrections": [],
                             'final_energy': None})
        self.job_ids.append(j.job_id)
    # Persist the calculation log to the configured checkpoint file,
    # falling back to the class-level default log file.
    if self.checkpoint_file:
        dumpfn(self.cal_log, self.checkpoint_file,
               cls=MontyEncoder, indent=4)
    else:
        dumpfn(self.cal_log, Calibrate.LOG_FILE,
               cls=MontyEncoder, indent=4)
def run(self, fw_spec):
    """
    Run VASP through Custodian.

    The VASP command is taken from the VASP_CMD environment variable.
    Jobs may be supplied via fw_spec['custodian_jobs']; otherwise a
    single default VaspJob writing into self.run_dir is used.

    :param fw_spec: firework spec dict
    :raises ValueError: if VASP_CMD is not set in the environment
    """
    try:
        vasp_cmd = os.environ['VASP_CMD'].split()
    except KeyError:
        # Narrowed from a bare except: only a missing env var is expected.
        raise ValueError('Unable to find vasp command')
    if 'custodian_jobs' in fw_spec:
        jobs = fw_spec['custodian_jobs']
    else:
        jobs = [VaspJob(vasp_cmd=vasp_cmd, auto_npar=False,
                        output_file=os.path.join(self.run_dir, 'vasp.out'),
                        stderr_file=os.path.join(self.run_dir, 'std_err.txt'),
                        backup=False, auto_gamma=False)]
    custodian = Custodian(handlers=self.custodian_handlers, jobs=jobs,
                          validators=None, max_errors=10,
                          polling_time_step=10, monitor_freq=30)
    custodian.run()
def do_run(args):
    """Run the job sequence from get_runs under Custodian."""
    error_handlers = [VaspErrorHandler(), UnconvergedErrorHandler()]
    job_sequence = get_runs(vasp_command=args.command.split(),
                            target=args.target,
                            mode=args.mode,
                            max_steps=args.max_steps)
    Custodian(error_handlers, job_sequence, max_errors=10).run()
def do_run(args):
    """Drive a geometry optimization through Custodian and log completion."""
    error_handlers = [
        VaspErrorHandler(),
        MeshSymmetryErrorHandler(),
        UnconvergedErrorHandler(),
        NonConvergingErrorHandler(),
        PotimErrorHandler(),
    ]
    custodian = Custodian(error_handlers, get_runs(args),
                          max_errors=10, gzipped_output=args.gzip)
    custodian.run()
    logging.info("Geometry optimization complete")
def _test_simplejob(self):
    """Run a batch of example jobs with two handlers sharing one counter dict."""
    os.chdir(self.path)
    njobs = 100
    shared = {"initial": 0, "total": 0}
    handlers = [ExampleHandler(shared), ExampleHandler2(shared)]
    jobs = [ExampleJob(n, shared) for n in range(njobs)]
    Custodian(handlers, jobs, max_errors=njobs * 2).run()
def test_max_errors_per_handler_warning(self):
    """Corrections per run stay bounded even with a huge max_errors_per_job."""
    njobs = 100
    shared = {"initial": 0, "total": 0}
    custodian = Custodian([ExampleHandler1c(shared)],
                          [ExampleJob(n, shared) for n in range(njobs)],
                          max_errors=njobs * 10,
                          max_errors_per_job=1000)
    custodian.run()
    self.assertTrue(all(len(entry["corrections"]) <= 2
                        for entry in custodian.run_log))
def test_exitcode_error(self):
    """A nonzero exit code raises unless nonzero-returncode termination is off."""
    Custodian([], [ExitCodeJob(0)]).run()
    failing = Custodian([], [ExitCodeJob(1)])
    self.assertRaises(RuntimeError, failing.run)
    tolerant = Custodian([], [ExitCodeJob(1)],
                         terminate_on_nonzero_returncode=False)
    tolerant.run()
def test_exitcode_error(self):
    """A nonzero exit code raises ReturnCodeError and is flagged in run_log."""
    Custodian([], [ExitCodeJob(0)]).run()
    failing = Custodian([], [ExitCodeJob(1)])
    self.assertRaises(ReturnCodeError, failing.run)
    self.assertTrue(failing.run_log[-1]["nonzero_return_code"])
    tolerant = Custodian([], [ExitCodeJob(1)],
                         terminate_on_nonzero_returncode=False)
    tolerant.run()
def do_run(args):
    """Run an NWChem job through Custodian, logging to run.log."""
    logging.basicConfig(format='%(asctime)s %(message)s',
                        level=logging.INFO,
                        filename="run.log")
    nwchem_job = NwchemJob(nwchem_cmd=args.command.split(),
                           input_file=args.infile,
                           output_file=args.outfile)
    Custodian([NwchemErrorHandler(output_filename=args.outfile)],
              [nwchem_job],
              max_errors=5,
              scratch_dir=args.scratch,
              gzipped_output=args.gzipped,
              checkpoint=True).run()
def do_run(args):
    """Load handlers/validators by name and run the VASP jobs via Custodian."""
    logging.basicConfig(format='%(asctime)s %(message)s',
                        level=logging.INFO,
                        filename="run.log")
    logging.info("Handlers used are %s" % args.handlers)
    handler_objs = [load_class("custodian.vasp.handlers", name)
                    for name in args.handlers]
    validator_objs = [load_class("custodian.vasp.validators", name)
                      for name in args.validators]
    Custodian(handler_objs, get_jobs(args), validator_objs,
              max_errors=args.max_errors,
              scratch_dir=args.scratch,
              gzipped_output=args.gzip,
              checkpoint=True).run()
def test_unrecoverable(self):
    """An unrecoverable handler either aborts (2) or lets the run finish (2b)."""
    njobs = 100
    shared = {"initial": 0, "total": 0}
    handler = ExampleHandler2(shared)
    aborting = Custodian([handler],
                         [ExampleJob(n, shared) for n in range(njobs)],
                         max_errors=njobs)
    self.assertRaises(RuntimeError, aborting.run)
    self.assertTrue(handler.has_error)
    handler = ExampleHandler2b(shared)
    continuing = Custodian([handler],
                           [ExampleJob(n, shared) for n in range(njobs)],
                           max_errors=njobs)
    continuing.run()
    self.assertTrue(handler.has_error)
def run_task(self, fw_spec):
    # write a file containing the formula and task_type for somewhat easier file system browsing
    self._write_formula_file(fw_spec)
    # TODO: make this better - is there a way to load an environment variable as the VASP_EXE?
    # Pick the MPI launcher and core count by sniffing the hostname.
    if 'nid' in socket.gethostname():  # hopper compute nodes
        v_exe = shlex.split('aprun -n 48 vasp')  # TODO: make ncores dynamic!
    elif 'c' in socket.gethostname():  # carver / mendel compute nodes
        v_exe = shlex.split('mpirun -n 32 vasp')  # TODO: make ncores dynamic!
    else:
        raise ValueError('Unrecognized host!')
    # Point every job at the host-appropriate VASP command.
    for job in self.jobs:
        job.vasp_command = v_exe
    # NOTE(review): third positional argument is passed as max_errors —
    # confirm against the installed custodian signature (newer versions
    # take validators positionally there).
    c = Custodian(self.handlers, self.jobs, self.max_errors)
    custodian_out = c.run()
    # Union of all error names across every run's corrections.
    all_errors = set()
    for run in custodian_out:
        for correction in run['corrections']:
            all_errors.update(correction['errors'])
    stored_data = {'error_list': list(all_errors)}
    update_spec = {'prev_vasp_dir': os.getcwd(),
                   'prev_task_type': fw_spec['task_type']}
    update_spec.update({'mpsnl': fw_spec['mpsnl'],
                        'snlgroup_id': fw_spec['snlgroup_id']})
    return FWAction(stored_data=stored_data, update_spec=update_spec)
def do_run(args):
    """
    Perform the run.
    """
    custodian = Custodian(
        [
            VaspErrorHandler(),
            MeshSymmetryErrorHandler(),
            UnconvergedErrorHandler(),
            NonConvergingErrorHandler(),
            PotimErrorHandler(),
        ],
        get_runs(args),
        max_errors=10,
        gzipped_output=args.gzip,
    )
    custodian.run()
    logging.info("Geometry optimization complete")
def test_run(self):
    """Every job completes, so run() returns one entry per job.

    Uses range() instead of the Python 2-only xrange() so the test
    also runs under Python 3; iteration behavior is identical.
    """
    njobs = 100
    params = {"initial": 0, "total": 0}
    c = Custodian([ExampleHandler(params)],
                  [ExampleJob(i, params) for i in range(njobs)],
                  max_errors=njobs, log_file=None)
    output = c.run()
    self.assertEqual(len(output), njobs)
def run_task(self, fw_spec):
    # NOTE(review): this body uses a Python 2 print statement — the
    # surrounding module is Python-2-only.
    # write a file containing the formula and task_type for somewhat
    # easier file system browsing
    self._write_formula_file(fw_spec)
    # Pick the MPI launcher: fireworks env first, then whatever is on PATH.
    fw_env = fw_spec.get("_fw_env", {})
    if "mpi_cmd" in fw_env:
        mpi_cmd = fw_spec["_fw_env"]["mpi_cmd"]
    elif which("mpirun"):
        mpi_cmd = "mpirun"
    elif which("aprun"):
        mpi_cmd = "aprun"
    else:
        raise ValueError("No MPI command found!")
    # Process count comes from the PBS scheduler environment.
    nproc = os.environ['PBS_NP']
    # Build the normal and gamma-point VASP command lines.
    v_exe = shlex.split('{} -n {} {}'.format(mpi_cmd, nproc,
                                             fw_env.get("vasp_cmd", "vasp")))
    gv_exe = shlex.split('{} -n {} {}'.format(mpi_cmd, nproc,
                                              fw_env.get("gvasp_cmd", "gvasp")))
    print 'host:', os.environ['HOSTNAME']
    for job in self.jobs:
        job.vasp_cmd = v_exe
        job.gamma_vasp_cmd = gv_exe
    # Fail fast if the INCAR does not pass the task-type-specific checks.
    incar_errors = check_incar(fw_spec['task_type'])
    if incar_errors:
        raise ValueError("Critical error: INCAR does not pass checks: {}".format(incar_errors))
    logging.basicConfig(level=logging.DEBUG)
    c = Custodian(self.handlers, self.jobs, max_errors=self.max_errors,
                  gzipped_output=False,
                  validators=[VasprunXMLValidator()])  # manual gzip
    custodian_out = c.run()
    # Gzip everything except already-gzipped files and scheduler .OU/.ER logs.
    if self.gzip_output:
        for f in os.listdir(os.getcwd()):
            if not f.lower().endswith("gz") and not f.endswith(".OU") and not f.endswith(".ER"):
                with open(f, 'rb') as f_in, \
                        GzipFile('{}.gz'.format(f), 'wb') as f_out:
                    f_out.writelines(f_in)
                os.remove(f)
    # Union of error names from all corrections across runs.
    all_errors = set()
    for run in custodian_out:
        for correction in run['corrections']:
            all_errors.update(correction['errors'])
    stored_data = {'error_list': list(all_errors)}
    update_spec = {'prev_vasp_dir': os.getcwd(),
                   'prev_task_type': fw_spec['task_type'],
                   'mpsnl': fw_spec['mpsnl'],
                   'snlgroup_id': fw_spec['snlgroup_id'],
                   'run_tags': fw_spec['run_tags'],
                   'parameters': fw_spec.get('parameters')}
    return FWAction(stored_data=stored_data, update_spec=update_spec)
def test_unrecoverable(self):
    """An unrecoverable error stops the sequence after the first job.

    Uses range() instead of the Python 2-only xrange() so the test
    also runs under Python 3; iteration behavior is identical.
    """
    njobs = 100
    params = {"initial": 0, "total": 0}
    c = Custodian([ExampleHandler2(params)],
                  [ExampleJob(i, params) for i in range(njobs)],
                  max_errors=njobs, log_file=None)
    output = c.run()
    # Because this is unrecoverable, there should only be one output.
    self.assertEqual(len(output), 1)
def test_checkpoint_loading(self):
    """A restored checkpoint pre-populates run_log; run() still covers all jobs."""
    njobs = 5
    shared = {"initial": 0, "total": 0}
    custodian = Custodian([ExampleHandler(shared)],
                          [ExampleJob(n, shared) for n in range(njobs)],
                          [ExampleValidator1()],
                          max_errors=100,
                          checkpoint=True)
    self.assertEqual(len(custodian.run_log), 3)
    self.assertEqual(len(custodian.run()), 5)
def do_run(args):
    """Run an NWChem calculation under Custodian with checkpointing."""
    logging.basicConfig(format="%(asctime)s %(message)s",
                        level=logging.INFO,
                        filename="run.log")
    handler = NwchemErrorHandler(output_filename=args.outfile)
    nwchem_job = NwchemJob(nwchem_cmd=args.command.split(),
                           input_file=args.infile,
                           output_file=args.outfile)
    Custodian([handler],
              [nwchem_job],
              max_errors=5,
              scratch_dir=args.scratch,
              gzipped_output=args.gzipped,
              checkpoint=True).run()
def test_run(self):
    """All jobs finish; each contributes one entry, and the handler serializes."""
    njobs = 100
    shared = {"initial": 0, "total": 0}
    custodian = Custodian([ExampleHandler(shared)],
                          [ExampleJob(n, shared) for n in range(njobs)],
                          max_errors=njobs)
    self.assertEqual(len(custodian.run()), njobs)
    d = ExampleHandler(shared).as_dict()
def test_run(self):
    """All jobs finish; each contributes one entry, and the handler serializes."""
    njobs = 100
    shared = {"initial": 0, "total": 0}
    custodian = Custodian([ExampleHandler(shared)],
                          [ExampleJob(n, shared) for n in range(njobs)],
                          max_errors=njobs)
    self.assertEqual(len(custodian.run()), njobs)
    print(ExampleHandler(shared).as_dict())
def run_vasp(override=None, suffix=''):
    """
    execute vasp with given override and suffix

    :param override: list of settings_override dicts for the job;
        default None means no overrides. (A mutable default list was
        replaced with None to avoid state shared across calls.)
    :param suffix: suffix appended to output files by the job
    :return: None (runs the job for its side effects)
    """
    from Classes_Pymatgen import Incar
    from Classes_Custodian import StandardJob
    from custodian.custodian import Custodian
    import os

    if override is None:
        override = []

    # Determine whether to use Gamma optimized vasp
    incar = Incar.from_file('INCAR')
    if 'AUTO_GAMMA' in incar and incar['AUTO_GAMMA']:
        vasp = os.environ['VASP_GAMMA']
    else:
        vasp = os.environ['VASP_KPTS']

    # NOTE(review): a [VaspErrorHandler('vasp.log')] list was previously
    # built here and immediately overwritten with [] — the dead assignment
    # is removed; running with no handlers is preserved as-is.
    handlers = []
    # Choose the launcher: srun directly, or mpirun with an explicit -np.
    if os.environ['VASP_MPI'] == 'srun':
        vaspjob = [
            StandardJob(['srun', vasp], 'vasp.log', auto_npar=False,
                        backup=False, settings_override=override,
                        suffix=suffix, final=False)
        ]
    else:
        vaspjob = [
            StandardJob(['mpirun', '-np', os.environ['VASP_PROCS'], vasp],
                        'vasp.log', auto_npar=False, backup=False,
                        settings_override=override, suffix=suffix,
                        final=False)
        ]
    c = Custodian(handlers, vaspjob, max_errors=10)
    c.run()
def do_run(args):
    """Assemble handlers and validators by name, then run the jobs."""
    logging.basicConfig(format='%(asctime)s %(message)s',
                        level=logging.INFO,
                        filename="run.log")
    logging.info("Handlers used are %s" % args.handlers)
    handler_objs = [load_class("custodian.vasp.handlers", name)
                    for name in args.handlers]
    validator_objs = [load_class("custodian.vasp.validators", name)
                      for name in args.validators]
    Custodian(handler_objs,
              get_jobs(args),
              validator_objs,
              max_errors=args.max_errors,
              scratch_dir=args.scratch,
              gzipped_output=args.gzip,
              checkpoint=True).run()
def vasp_run(args):
    """Run a VASP structure optimization (optionally with k-point
    convergence) through Custodian, configured from parsed CLI args."""
    # Normalize the vasp command: a single-element list is treated as a
    # shell-style string and split into argv tokens.
    if len(args.vasp_cmd) == 0:
        raise NoVaspCommandError("Vasp command must be specified.")
    elif len(args.vasp_cmd) == 1:
        vasp_cmd = args.vasp_cmd[0].split()
    else:
        vasp_cmd = args.vasp_cmd

    # Flatten the known INCAR flag groups and map the user's settings onto them.
    flags = list(chain.from_iterable(incar_flags.values()))
    user_incar_settings = list2dict(args.user_incar_setting, flags)

    # With a timeout, swap the last default handler for a walltime handler.
    # NOTE(review): HANDLER_GROUP["default"] is mutated in place here, so
    # repeated calls accumulate handlers — confirm this is intended.
    handlers = HANDLER_GROUP["default"]
    if args.timeout:
        handlers.pop(-1)
        handlers.append(TooLongTimeCalcErrorHandler(args.timeout))

    optimization_args = {"vasp_cmd": vasp_cmd,
                         "removes_wavecar": args.rm_wavecar,
                         "max_relax_num": args.max_relax_num,
                         "left_files": args.left_files,
                         "removed_files": ["PCDAT", "vasprun.xml"]}

    custodian_args = {"handlers": handlers,
                      "polling_time_step": 5,
                      "monitor_freq": 1,
                      "max_errors": 10,
                      "gzipped_output": False}

    # Choose the job generator: k-point convergence or plain optimization.
    if args.kpoint_conv:
        xc = args.xc or Xc.pbesol
        custodian_args["jobs"] = ViseVaspJob.kpt_converge(
            xc=xc,
            convergence_criterion=args.kpoints_criteria,
            # initial_kpt_density=args.kpoint_density,
            user_incar_settings=user_incar_settings,
            **optimization_args)
    else:
        custodian_args["jobs"] = ViseVaspJob.structure_optimization_run(
            **optimization_args)

    c = Custodian(**custodian_args)
    c.run()
def run_task(self, fw_spec):
    """Run VASP via Custodian with a named handler group, resolving the
    command and options through env_chk from the firework spec."""
    # Named handler groups, from empty ("no_handler") to strictest ("strict").
    handler_groups = {
        "default": [VaspErrorHandler(), MeshSymmetryErrorHandler(),
                    UnconvergedErrorHandler(), NonConvergingErrorHandler(),
                    PotimErrorHandler(), PositiveEnergyErrorHandler(),
                    FrozenJobErrorHandler(), StdErrHandler(),
                    DriftErrorHandler()],
        "strict": [VaspErrorHandler(), MeshSymmetryErrorHandler(),
                   UnconvergedErrorHandler(), NonConvergingErrorHandler(),
                   PotimErrorHandler(), PositiveEnergyErrorHandler(),
                   FrozenJobErrorHandler(), StdErrHandler(),
                   AliasingErrorHandler(), DriftErrorHandler()],
        "md": [VaspErrorHandler(), NonConvergingErrorHandler()],
        "no_handler": []
    }

    # Resolve the vasp command; a string gets env vars expanded and is
    # split into argv tokens.
    vasp_cmd = env_chk(self["vasp_cmd"], fw_spec)
    if isinstance(vasp_cmd, six.string_types):
        vasp_cmd = os.path.expandvars(vasp_cmd)
        vasp_cmd = shlex.split(vasp_cmd)

    # initialize variables
    scratch_dir = env_chk(self.get("scratch_dir"), fw_spec)
    gzip_output = self.get("gzip_output", True)
    max_errors = self.get("max_errors", 5)
    auto_npar = env_chk(self.get("auto_npar"), fw_spec,
                        strict=False, default=False)
    gamma_vasp_cmd = env_chk(self.get("gamma_vasp_cmd"), fw_spec,
                             strict=False, default=None)

    jobs = [VaspJob(vasp_cmd, auto_npar=auto_npar,
                    gamma_vasp_cmd=gamma_vasp_cmd)]

    # construct handlers
    handlers = handler_groups[self.get("handler_group", "default")]
    validators = []

    c = Custodian(handlers, jobs, validators=validators,
                  max_errors=max_errors, scratch_dir=scratch_dir,
                  gzipped_output=gzip_output)
    c.run()
def run(self, job_cmd=None):
    """
    run the vasp jobs through custodian
    if the job list is empty,
    run a single job with the initial input set

    :param job_cmd: optional command overriding each job's own job_cmd;
        when None, every job gets self.job_cmd.
    """
    # Apply the (possibly overridden) command to every job before running.
    for j in self.jobs:
        if job_cmd is not None:
            j.job_cmd = job_cmd
        else:
            j.job_cmd = self.job_cmd
    # NOTE(review): c_params is built but never used below — presumably
    # leftover from a serialization path; confirm before removing.
    c_params = {
        'jobs': [j.as_dict() for j in self.jobs],
        'handlers': [h.as_dict() for h in self.handlers],
        'max_errors': 5
    }
    c = Custodian(self.handlers, self.jobs, max_errors=5)
    c.run()
    # Append a log entry per job; final_energy is filled in later elsewhere.
    for j in self.jobs:
        self.cal_log.append({
            "job": j.as_dict(),
            'job_id': j.job_id,
            "corrections": [],
            'final_energy': None
        })
        self.job_ids.append(j.job_id)
    # Persist the calculation log to the configured checkpoint file,
    # falling back to the class-level default log file.
    if self.checkpoint_file:
        dumpfn(self.cal_log, self.checkpoint_file,
               cls=MontyEncoder, indent=4)
    else:
        dumpfn(self.cal_log, Calibrate.LOG_FILE,
               cls=MontyEncoder, indent=4)
def run_task(self, fw_spec): continuation = self.get('continuation', False) # TODO: detour the firework pending the result c = Custodian([ATATWalltimeHandler()], [ATATInfDetJob(continuation=continuation)], monitor_freq=1, polling_time_step=300) cust_result = c.run() if len(cust_result[0]['corrections']) > 0: # we hit the walltime handler, detour another ID Firework os.remove('stop') from dfttk.fworks import InflectionDetectionFW from fireworks import Workflow # we have to add the calc locs for this calculation by hand # because the detour action seems to disable spec mods infdet_wf = Workflow([InflectionDetectionFW(Structure.from_file('POSCAR'), continuation=True, spec={'calc_locs': extend_calc_locs(self.get('name', 'InfDet'), fw_spec)})]) return FWAction(detours=[infdet_wf])
def test_validators(self):
    """Validator1 lets every job pass; Validator2 fails and aborts the run."""
    njobs = 100
    shared = {"initial": 0, "total": 0}
    passing = Custodian([ExampleHandler(shared)],
                        [ExampleJob(n, shared) for n in range(njobs)],
                        [ExampleValidator1()],
                        max_errors=njobs)
    self.assertEqual(len(passing.run()), njobs)
    njobs = 100
    shared = {"initial": 0, "total": 0}
    failing = Custodian([ExampleHandler(shared)],
                        [ExampleJob(n, shared) for n in range(njobs)],
                        [ExampleValidator2()],
                        max_errors=njobs)
    self.assertRaises(RuntimeError, failing.run)
def run_task(self, fw_spec):
    # NOTE(review): this body uses Python 2 print statements — the
    # surrounding module is Python-2-only.
    # write a file containing the formula and task_type for somewhat
    # easier file system browsing
    self._write_formula_file(fw_spec)
    # TODO: make this better - is there a way to load an environment
    # variable as the VASP_EXE?
    if 'nid' in socket.gethostname():  # hopper compute nodes
        # TODO: can base ncores on FW_submit.script
        v_exe = shlex.split('aprun -n 48 vasp')
        gv_exe = shlex.split('aprun -n 48 gvasp')
        print 'running on HOPPER'
    elif 'c' in socket.gethostname():  # mendel compute nodes
        # TODO: can base ncores on FW_submit.script
        # NOTE(review): gv_exe uses aprun while v_exe uses mpirun on this
        # branch — looks like a copy/paste slip; confirm before changing.
        v_exe = shlex.split('mpirun -n 32 vasp')
        gv_exe = shlex.split('aprun -n 32 gvasp')
        print 'running on MENDEL'
    else:
        raise ValueError('Unrecognized host!')
    for job in self.jobs:
        job.vasp_cmd = v_exe
        job.gamma_vasp_cmd = gv_exe
    logging.basicConfig(level=logging.DEBUG)
    c = Custodian(self.handlers, self.jobs, self.max_errors)
    custodian_out = c.run()
    # Union of error names from all corrections across runs.
    all_errors = set()
    for run in custodian_out:
        for correction in run['corrections']:
            all_errors.update(correction['errors'])
    stored_data = {'error_list': list(all_errors)}
    update_spec = {
        'prev_vasp_dir': get_block_part(os.getcwd()),
        'prev_task_type': fw_spec['task_type'],
        'mpsnl': fw_spec['mpsnl'],
        'snlgroup_id': fw_spec['snlgroup_id'],
        'run_tags': fw_spec['run_tags']
    }
    return FWAction(stored_data=stored_data, update_spec=update_spec)
def run_task(self, fw_spec):
    # NOTE(review): this body uses Python 2 print statements — the
    # surrounding module is Python-2-only.
    # write a file containing the formula and task_type for somewhat
    # easier file system browsing
    self._write_formula_file(fw_spec)
    # TODO: make this better - is there a way to load an environment
    # variable as the VASP_EXE?
    if 'nid' in socket.gethostname():  # hopper compute nodes
        # TODO: can base ncores on FW_submit.script
        v_exe = shlex.split('aprun -n 48 vasp')
        gv_exe = shlex.split('aprun -n 48 gvasp')
        print 'running on HOPPER'
    elif 'c' in socket.gethostname():  # mendel compute nodes
        # TODO: can base ncores on FW_submit.script
        # NOTE(review): gv_exe uses aprun while v_exe uses mpirun on this
        # branch — looks like a copy/paste slip; confirm before changing.
        v_exe = shlex.split('mpirun -n 32 vasp')
        gv_exe = shlex.split('aprun -n 32 gvasp')
        print 'running on MENDEL'
    else:
        raise ValueError('Unrecognized host!')
    for job in self.jobs:
        job.vasp_cmd = v_exe
        job.gamma_vasp_cmd = gv_exe
    logging.basicConfig(level=logging.DEBUG)
    c = Custodian(self.handlers, self.jobs, self.max_errors)
    custodian_out = c.run()
    # Union of error names from all corrections across runs.
    all_errors = set()
    for run in custodian_out:
        for correction in run['corrections']:
            all_errors.update(correction['errors'])
    stored_data = {'error_list': list(all_errors)}
    update_spec = {'prev_vasp_dir': get_block_part(os.getcwd()),
                   'prev_task_type': fw_spec['task_type'],
                   'mpsnl': fw_spec['mpsnl'],
                   'snlgroup_id': fw_spec['snlgroup_id'],
                   'run_tags': fw_spec['run_tags']}
    return FWAction(stored_data=stored_data, update_spec=update_spec)
def test_validators(self):
    """Validator1 passes; Validator2 raises ValidationError and is logged."""
    njobs = 100
    shared = {"initial": 0, "total": 0}
    passing = Custodian(
        [ExampleHandler(shared)],
        [ExampleJob(n, shared) for n in range(njobs)],
        [ExampleValidator1()],
        max_errors=njobs,
    )
    self.assertEqual(len(passing.run()), njobs)
    njobs = 100
    shared = {"initial": 0, "total": 0}
    validator = ExampleValidator2()
    failing = Custodian(
        [ExampleHandler(shared)],
        [ExampleJob(n, shared) for n in range(njobs)],
        [validator],
        max_errors=njobs,
    )
    self.assertRaises(ValidationError, failing.run)
    self.assertEqual(failing.run_log[-1]["validator"], validator)
def run_task(self, fw_spec):
    """
    Required Parameters:
        dir (str path): directory containing the vasp inputs
        jobs (VaspJob): Contains the cmd needed to run vasp
    Optional Parameters:
        custodian_params (dict **kwargs): Contains the job and the
            scratch directory for a custodian run
        handlers (list of custodian handlers): Defaults to empty list
    """
    dec = MontyDecoder()
    dir = dec.process_decoded(self['dir'])
    cwd = dec.process_decoded(self['cwd'])
    # Change to the directory with the vasp inputs to run custodian
    # NOTE(review): cwd+dir is plain string concatenation — assumes cwd
    # ends with a path separator; confirm against callers.
    os.chdir(cwd + dir)
    handlers = dec.process_decoded(self.get('handlers', []))
    jobs = dec.process_decoded(self['jobs'])
    max_errors = dec.process_decoded(self['max_errors'])
    fw_env = fw_spec.get("_fw_env", {})
    cust_params = self.get("custodian_params", {})
    # Get the scratch directory
    if fw_env.get('scratch_root'):
        cust_params['scratch_dir'] = os.path.expandvars(
            fw_env['scratch_root'])
    c = Custodian(handlers=handlers, jobs=jobs, max_errors=max_errors,
                  gzipped_output=True, **cust_params)
    output = c.run()
    return FWAction(stored_data=output)
def run_task(self, fw_spec):
    """
    Run the decoded VASP jobs through Custodian, applying _fw_env
    overrides for the vasp commands and the scratch directory.

    :param fw_spec: firework spec; _fw_env may supply vasp_cmd,
        gamma_vasp_cmd and scratch_root overrides.
    :return: FWAction whose stored_data is the custodian run output
    """
    dec = MontyDecoder()
    jobs = dec.process_decoded(self["jobs"])
    fw_env = fw_spec.get("_fw_env", {})
    # Override VASP and gamma VASP commands using fw_env
    if fw_env.get("vasp_cmd"):
        for j in jobs:
            j.vasp_cmd = os.path.expandvars(fw_env["vasp_cmd"])
            j.gamma_vasp_cmd = j.gamma_vasp_cmd
            logging.info("Vasp command is {}".format(j.vasp_cmd))
    if fw_env.get("gamma_vasp_cmd"):
        for j in jobs:
            j.gamma_vasp_cmd = os.path.expandvars(fw_env["gamma_vasp_cmd"])
            logging.info("Vasp gamma command is {}".format(
                j.gamma_vasp_cmd))
    # Override custodian scratch dir.
    cust_params = self.get("custodian_params", {})
    if fw_env.get("scratch_root"):
        cust_params["scratch_dir"] = os.path.expandvars(
            fw_env["scratch_root"])
    logging.info("Running with custodian params %s" % cust_params)
    handlers = [
        VaspErrorHandler(), MeshSymmetryErrorHandler(),
        UnconvergedErrorHandler(), NonConvergingErrorHandler(),
        PotimErrorHandler()
    ]
    validators = [VasprunXMLValidator()]
    # BUG FIX: Custodian expects handler/validator *objects*; the previous
    # code passed their as_dict() serializations, which have no check()/
    # correct() methods and so silently disabled error handling.
    c = Custodian(handlers=handlers,
                  jobs=jobs,
                  validators=validators,
                  **cust_params)
    output = c.run()
    return FWAction(stored_data=output)
def run_qchem(cls, qcinp, implicit_solvent, mixed_aux_basis, mixed_basis,
              input_file="mol.qcinp", output_file="mol.qcout",
              gzipped=True):
    """Run a QChem input deck through Custodian with a QChemErrorHandler,
    returning the custodian output and the starting directory."""
    mol = qcinp.jobs[0].mol
    num_atoms = len(mol)
    # Apply mixed (aux) basis sets to every non-single-point job.
    for qj in qcinp.jobs:
        if qj.params["rem"]["jobtype"] != "sp":
            if mixed_basis is not None:
                qj.set_basis_set(mixed_basis)
            if mixed_aux_basis is not None:
                qj.set_aux_basis_set(mixed_aux_basis)
    prev_qchem_dir = os.getcwd()
    qc_exe, half_cpus_cmd, openmp_cmd = cls.get_qchem_cmd(qcinp, mol)
    # Route QChemDrone logging to stdout at INFO level.
    logging.basicConfig(level=logging.INFO)
    qchem_logger = logging.getLogger('QChemDrone')
    qchem_logger.setLevel(logging.INFO)
    sh = logging.StreamHandler(stream=sys.stdout)
    sh.setLevel(getattr(logging, 'INFO'))
    qchem_logger.addHandler(sh)
    scf_max_cycles = 200
    geom_max_cycles = 200
    # Alternate commands the job can fall back to.
    alt_cmd = {"half_cpus": half_cpus_cmd,
               "openmp": openmp_cmd}
    if cls._is_openmp_only_job(qcinp):
        qc_exe = openmp_cmd
        # Translate the MPI process flag to the OpenMP thread flag.
        alt_cmd["half_cpus"] = shlex.split(
            " ".join(half_cpus_cmd).replace("-np", "-nt"))
        alt_cmd.pop("openmp")
    # Larger molecules get more SCF/geometry cycles before giving up.
    if num_atoms > 50:
        scf_max_cycles = 300
        geom_max_cycles = 500
    qcinp.write_file(input_file)
    # Write implicit-solvent parameters to the file QChem reads, when given.
    if implicit_solvent is not None:
        solvent_data = implicit_solvent.get('solvent_data', None)
        if solvent_data is not None:
            values = ['{:.4f}'.format(solvent_data[t]) for t in
                      ['Dielec', 'SolN', 'SolA', 'SolB', 'SolG', 'SolC',
                       'SolH']]
            solvent_text = ' '.join(values)
            with open('solvent_data', 'w') as f:
                f.write(solvent_text)
    qclog_file = os.path.splitext(output_file)[0] + ".qclog"
    total_physical_memory = cls.get_physical_memory()
    job = QchemJob(qc_exe, input_file=input_file,
                   output_file=output_file, qclog_file=qclog_file,
                   alt_cmd=alt_cmd, gzipped=gzipped,
                   total_physical_memory=total_physical_memory)
    handler = QChemErrorHandler(qchem_job=job, input_file=input_file,
                                output_file=output_file,
                                scf_max_cycles=scf_max_cycles,
                                geom_max_cycles=geom_max_cycles)
    c = Custodian(handlers=[handler], jobs=[job], max_errors=50)
    custodian_out = c.run()
    cls.clean_up(qcinp)
    return custodian_out, prev_qchem_dir
return False @property def to_dict(self): """ Similar to Jobs, ErrorHandlers should have a to_dict property that returns a JSON-serializable dict. """ return {} @staticmethod def from_dict(d): """ Similar to Jobs, ErrorHandlers should have a from_dict static property that returns the Example Handler from a JSON-serializable dict. """ return ExampleHandler() if __name__ == "__main__": import logging logging.basicConfig(level=logging.INFO) njobs = 100 params = {"initial": 0, "total": 0} c = Custodian([ExampleHandler(params)], [ExampleJob(i, params) for i in xrange(njobs)], max_errors=njobs) output = c.run() total_errors = sum([len(d["corrections"]) for d in output]) print print "Total errors = {}".format(total_errors)
correcting errors. """ return False @property def to_dict(self): """ Similar to Jobs, ErrorHandlers should have a to_dict property that returns a JSON-serializable dict. """ return {} @staticmethod def from_dict(d): """ Similar to Jobs, ErrorHandlers should have a from_dict static property that returns the Example Handler from a JSON-serializable dict. """ return ExampleHandler() if __name__ == "__main__": njobs = 100 params = {"initial": 0, "total": 0} c = Custodian([ExampleHandler(params)], [ExampleJob(i, params) for i in xrange(njobs)], max_errors=njobs) output = c.run() total_errors = sum([len(d["corrections"]) for d in output]) print print "Total errors = {}".format(total_errors)
def get_energy(i, structure: Structure, target=0.01):
    """
    :param i: folder for structure to be placed in
    :param structure: Structure
    :param target: energy to converge to
    :return: energy in eV
    """
    # NOTE(review): original indentation was lost in this source; the
    # nesting below is the most plausible reconstruction — verify against
    # version control before relying on exact control flow.
    cwd = os.path.abspath('.')
    handlers = [VaspErrorHandler('vasp.log'), PositiveEnergyErrorHandler(),
                NonConvergingErrorHandler(nionic_steps=10, change_algo=True)]
    settings = [
        {'dict': 'INCAR',
         'action': {'_set': {'NSW': 5000, 'IOPT': 0, 'IBRION': 3,
                             'EDIFFG': 1e-5, 'POTIM': 0}, }}
    ]
    folder = os.path.join(cwd, str(i).zfill(4))
    if os.path.exists(folder):
        # Folder already exists: try to reuse converged results.
        Poscar(structure).write_file(os.path.join(folder, 'POSCAR'))
        try:
            vasprun_above = Vasprun(os.path.join(folder, 'above', 'vasprun.xml'))
            vasprun_below = Vasprun(os.path.join(folder, 'below', 'vasprun.xml'))
            if vasprun_above.converged and vasprun_below.converged:
                # if vasprun_above.final_energy - vasprun_below.final_energy < target:
                #     for f in ['WAVECAR', 'CHGCAR', 'vasprun.xml', 'CONTCAR', 'POSCAR', 'INCAR', 'KPOINTS', 'POTCAR']
                #         shutil.copy(os.path.join(folder, 'above', f), f
                # Cache the lower of the two energies and return it.
                with open(os.path.join(folder, 'energy.txt'), 'w') as f:
                    f.write(str(min(vasprun_above.final_energy,
                                    vasprun_below.final_energy)))
                return min(vasprun_above.final_energy,
                           vasprun_below.final_energy)
        except:
            # Fall back to a cached energy.txt or a single vasprun.xml.
            try:
                if os.path.exists(os.path.join(folder, 'energy.txt')):
                    with open(os.path.join(folder, 'energy.txt'), 'r') as f:
                        energy = float(f.read().split()[0])
                    return energy
                else:
                    shutil.copy('INCAR', os.path.join(folder, 'INCAR'))
                    vasprun = Vasprun(os.path.join(folder, 'vasprun.xml'))
                    with open(os.path.join(folder, 'energy.txt'), 'w') as f:
                        f.write(str(vasprun.final_energy))
                    return vasprun.final_energy
            except:
                pass
    else:
        os.mkdir(folder)
    # Find the nearest already-computed folders above and below index i.
    above = None
    below = None
    for dir in [dir for dir in os.listdir(cwd)
                if os.path.isdir(os.path.join(cwd, dir))]:
        try:
            dir_i = int(dir)
            if i == dir_i:
                pass
            if dir_i > i:
                if above is None:
                    above = dir_i
                elif dir_i - i < above - i:
                    above = dir_i
            elif dir_i < i:
                if below is None:
                    below = dir_i
                elif dir_i - i < below - i:
                    below = dir_i
            # NOTE(review): 'closest' is never initialized in this function;
            # these branches raise NameError if reached, which the bare
            # except below silently swallows — likely dead leftover code.
            elif closest is None:
                closest = dir
            elif abs(i - int(closest)) >= abs(i - int(dir)):
                closest = dir
        except:
            pass
    same_wfxns = 0
    # Prepare and run the 'above' and 'below' calculations, seeding each
    # from the nearest neighbor's WAVECAR/CHGCAR when available.
    for dir_i, dir in [(str(above).zfill(4), 'above'),
                       (str(below).zfill(4), 'below')]:
        try:
            vasprun = Vasprun(os.path.join(folder, dir, 'vasprun.xml'))
            if vasprun.converged:
                pass
            else:
                raise Exception('Not Converged')
        except:
            os.makedirs(os.path.join(folder, dir), exist_ok=True)
            if not os.path.exists(os.path.join(folder, dir, 'WAVECAR')):
                try:
                    shutil.copy(os.path.join(dir_i, 'WAVECAR'),
                                os.path.join(folder, dir, 'WAVECAR'))
                    shutil.copy(os.path.join(dir_i, 'CHGCAR'),
                                os.path.join(folder, dir, 'CHGCAR'))
                except:
                    # Neighbor had no top-level WAVECAR: pull from its
                    # lower-energy above/below subfolder instead.
                    if os.path.exists(os.path.join(dir_i, 'above', 'vasprun.xml')) and \
                            os.path.exists(os.path.join(dir_i, 'below', 'vasprun.xml')):
                        vasprun_above = Vasprun(os.path.join(dir_i, 'above', 'vasprun.xml'))
                        vasprun_below = Vasprun(os.path.join(dir_i, 'below', 'vasprun.xml'))
                        if vasprun_above.final_energy < vasprun_below.final_energy:
                            lowest_dir = 'above'
                        else:
                            lowest_dir = 'below'
                        shutil.copy(os.path.join(dir_i, lowest_dir, 'WAVECAR'),
                                    os.path.join(folder, dir, 'WAVECAR'))
                        shutil.copy(os.path.join(dir_i, lowest_dir, 'CHGCAR'),
                                    os.path.join(folder, dir, 'CHGCAR'))
                        if vasprun_above.final_energy - vasprun_below.final_energy < target:
                            same_wfxns += 1
            if same_wfxns == 2:
                # Both seeds agree within target: reuse 'above' for 'below'.
                logging.info('Wavefunctions are the same')
                if os.path.exists(os.path.join(folder, dir, 'below')):
                    shutil.rmtree(os.path.join(folder, dir, 'below'))
                shutil.copytree(os.path.join(folder, dir, 'above'),
                                os.path.join(folder, dir, 'below'))
            else:
                shutil.copy('INCAR', os.path.join(folder, dir, 'INCAR'))
                shutil.copy('KPOINTS', os.path.join(folder, dir, 'KPOINTS'))
                shutil.copy('POTCAR', os.path.join(folder, dir, 'POTCAR'))
            os.chdir(folder)
            os.chdir(dir)
            Poscar(structure).write_file('POSCAR')
            incar = Incar.from_file('INCAR')
            # Choose the gamma-optimized binary when AUTO_GAMMA is set.
            if 'AUTO_GAMMA' in incar and incar['AUTO_GAMMA']:
                vasp = os.environ['VASP_GAMMA']
            else:
                vasp = os.environ['VASP_KPTS']
            incar.write_file('INCAR')
            if os.environ['VASP_MPI'] == 'srun':
                j = StandardJob([os.environ['VASP_MPI'], vasp], 'vasp.log',
                                auto_npar=False, final=True,
                                settings_override=settings)
            else:
                j = StandardJob([os.environ['VASP_MPI'], '-np',
                                 os.environ['PBS_NP'], vasp], 'vasp.log',
                                auto_npar=False, final=True,
                                settings_override=settings)
            c = Custodian(handlers, [j], max_errors=10)
            c.run()
            os.chdir(cwd)
    # Recurse: the next call finds the now-converged results and returns.
    return get_energy(i, structure)
def run_task(self, fw_spec):
    # write a file containing the formula and task_type for somewhat
    # easier file system browsing
    self._write_formula_file(fw_spec)
    # Pick the MPI launcher: fireworks env first, then whatever is on PATH.
    fw_env = fw_spec.get("_fw_env", {})
    if "mpi_cmd" in fw_env:
        mpi_cmd = fw_spec["_fw_env"]["mpi_cmd"]
    elif which("mpirun"):
        mpi_cmd = "mpirun"
    elif which("aprun"):
        mpi_cmd = "aprun"
    else:
        raise ValueError("No MPI command found!")
    # TODO: last two env vars, i.e. SGE and LoadLeveler, are untested
    # Take the first scheduler env var that yields a process count.
    env_vars = ['PBS_NP', 'SLURM_NTASKS', 'NSLOTS', 'LOADL_TOTAL_TASKS']
    nproc = None
    for env_var in env_vars:
        nproc = os.environ.get(env_var, None)
        if nproc is not None:
            break
    if nproc is None:
        raise ValueError("None of the env vars {} found to set nproc!".format(env_vars))
    fw_data = FWData()
    # Without job packing, build the commands directly; otherwise delegate.
    if (not fw_data.MULTIPROCESSING) or (fw_data.NODE_LIST is None):
        if "srun" in mpi_cmd:
            mpi_cmd += " -v"
        v_exe = shlex.split('{} -n {} {}'.format(
            mpi_cmd, nproc, fw_env.get("vasp_cmd", "vasp")))
        gv_exe = shlex.split('{} -n {} {}'.format(
            mpi_cmd, nproc, fw_env.get("gvasp_cmd", "gvasp")))
    else:
        v_exe, gv_exe = self._get_vasp_cmd_in_job_packing(
            fw_data, fw_env, mpi_cmd)
    print('host:', os.environ['HOSTNAME'])
    stderr_file = "std_err.txt"
    for job in self.jobs:
        job.vasp_cmd = v_exe
        job.gamma_vasp_cmd = gv_exe
        job.stderr_file = stderr_file
    # Under srun, cancel job steps via scancel instead of custodian's default.
    if v_exe[0] == "srun":
        scancel_terminator = ScancelJobStepTerminator(stderr_file)
        terminate_func = scancel_terminator.cancel_job_step
    else:
        terminate_func = None
    # Fail fast if the INCAR does not pass task-type-specific checks.
    incar_errors = check_incar(fw_spec['task_type'])
    if incar_errors:
        raise ValueError("Critical error: INCAR does not pass checks: {}".format(incar_errors))
    logging.basicConfig(level=logging.DEBUG)
    c = Custodian(self.handlers, self.jobs, max_errors=self.max_errors,
                  gzipped_output=False,
                  validators=[VasprunXMLValidator()],
                  terminate_func=terminate_func)  # manual gzip
    custodian_out = c.run()
    # Gzip everything except already-gzipped files and scheduler .OU/.ER logs.
    if self.gzip_output:
        for f in os.listdir(os.getcwd()):
            if not f.lower().endswith("gz") and not f.endswith(".OU") and not f.endswith(".ER"):
                with open(f, 'rb') as f_in, \
                        GzipFile('{}.gz'.format(f), 'wb') as f_out:
                    f_out.writelines(f_in)
                os.remove(f)
    # Union of error names from all corrections across runs.
    all_errors = set()
    for run in custodian_out:
        for correction in run['corrections']:
            all_errors.update(correction['errors'])
    stored_data = {'error_list': list(all_errors)}
    update_spec = {'prev_vasp_dir': os.getcwd(),
                   'prev_task_type': fw_spec['task_type'],
                   'mpsnl': fw_spec['mpsnl'],
                   'snlgroup_id': fw_spec['snlgroup_id'],
                   'run_tags': fw_spec['run_tags'],
                   'parameters': fw_spec.get('parameters')}
    return FWAction(stored_data=stored_data, update_spec=update_spec)
# Stand-alone driver: run VASP (vasp_std via ibrun) under Custodian.
from custodian.custodian import Custodian
from custodian.vasp.handlers import VaspErrorHandler, FrozenJobErrorHandler, \
    UnconvergedErrorHandler, MeshSymmetryErrorHandler, MaxForceErrorHandler, \
    PotimErrorHandler, NonConvergingErrorHandler, WalltimeHandler
from custodian.vasp.jobs import VaspJob

# Hard-coded MPI launcher and VTST-patched VASP binary path.
vasp_cmd = ['ibrun', '/home1/05018/tg843171/vasp.5.4.4_vtst/bin/vasp_std']
# Only the frozen-job handler is active: a job producing no output for
# 60 s is treated as frozen. The other imported handlers are unused here.
handlers = [FrozenJobErrorHandler(timeout=60)]
jobs = [VaspJob(vasp_cmd, final=True, suffix="", auto_npar=False)]
# Abort after at most 2 corrected errors.
c = Custodian(handlers, jobs, max_errors=2)
c.run()
def update_checkpoint(job_ids=None, jfile=None, **kwargs): """ rerun the jobs with job ids in the job_ids list. The jobs are read from the json checkpoint file, jfile. If no job_ids are given then the checkpoint file will be updated with corresponding final energy Args: job_ids: list of job ids to update or q resolve jfile: check point file """ cal_log = loadfn(jfile, cls=MontyDecoder) cal_log_new = [] all_jobs = [] run_jobs = [] handlers = [] final_energy = None incar = None kpoints = None qadapter = None # if updating the specs of the job for k, v in kwargs.items(): if k == 'incar': incar = v if k == 'kpoints': kpoints = v if k == 'que': qadapter = v for j in cal_log: job = j["job"] job.job_id = j['job_id'] all_jobs.append(job) if job_ids and (j['job_id'] in job_ids or job.job_dir in job_ids): logger.info('setting job {0} in {1} to rerun'.format( j['job_id'], job.job_dir)) contcar_file = job.job_dir + os.sep + 'CONTCAR' poscar_file = job.job_dir + os.sep + 'POSCAR' if os.path.isfile(contcar_file) and len( open(contcar_file).readlines()) != 0: logger.info('setting poscar file from {}'.format(contcar_file)) job.vis.poscar = Poscar.from_file(contcar_file) else: logger.info('setting poscar file from {}'.format(poscar_file)) job.vis.poscar = Poscar.from_file(poscar_file) if incar: logger.info('incar overridden') job.vis.incar = incar if kpoints: logger.info('kpoints overridden') job.vis.kpoints = kpoints if qadapter: logger.info('qadapter overridden') job.vis.qadapter = qadapter run_jobs.append(job) if run_jobs: c = Custodian(handlers, run_jobs, max_errors=5) c.run() for j in all_jobs: final_energy = j.get_final_energy() cal_log_new.append({ "job": j.as_dict(), 'job_id': j.job_id, "corrections": [], 'final_energy': final_energy }) dumpfn(cal_log_new, jfile, cls=MontyEncoder, indent=4)
def update_checkpoint(job_ids=None, jfile=None, **kwargs): """ rerun the jobs with job ids in the job_ids list. The jobs are read from the json checkpoint file, jfile. If no job_ids are given then the checkpoint file will be updated with corresponding final energy Args: job_ids: list of job ids to update or q resolve jfile: check point file """ cal_log = loadfn(jfile, cls=MontyDecoder) cal_log_new = [] all_jobs = [] run_jobs = [] handlers = [] final_energy = None incar = None kpoints = None qadapter = None #if updating the specs of the job for k, v in kwargs.items(): if k == 'incar': incar = v if k == 'kpoints': kpoints = v if k == 'que': qadapter = v for j in cal_log: job = j["job"] job.job_id = j['job_id'] all_jobs.append(job) if job_ids and (j['job_id'] in job_ids or job.job_dir in job_ids): logger.info('setting job {0} in {1} to rerun'.format(j['job_id'], job.job_dir)) contcar_file = job.job_dir+os.sep+'CONTCAR' poscar_file = job.job_dir+os.sep+'POSCAR' if os.path.isfile(contcar_file) and len(open(contcar_file).readlines()) != 0 : logger.info('setting poscar file from {}' .format(contcar_file)) job.vis.poscar = Poscar.from_file(contcar_file) else: logger.info('setting poscar file from {}' .format(poscar_file)) job.vis.poscar = Poscar.from_file(poscar_file) if incar: logger.info('incar overridden') job.vis.incar = incar if kpoints: logger.info('kpoints overridden') job.vis.kpoints = kpoints if qadapter: logger.info('qadapter overridden') job.vis.qadapter = qadapter run_jobs.append(job) if run_jobs: c = Custodian(handlers, run_jobs, max_errors=5) c.run() for j in all_jobs: final_energy = j.get_final_energy() cal_log_new.append({"job": j.as_dict(), 'job_id': j.job_id, "corrections": [], 'final_energy': final_energy}) dumpfn(cal_log_new, jfile, cls=MontyEncoder, indent=4)
#!/usr/bin/env python # -*- coding: utf-8 -*- """ staticのDFT計算を行う """ from custodian.custodian import Custodian from custodian.vasp.handlers import UnconvergedErrorHandler from custodian.vasp.handlers import VaspErrorHandler from custodian.vasp.jobs import VaspJob #from mycustodian import myVaspErrorHandler print("run custodian_static.py") print("running...") handlers = [VaspErrorHandler()] vasp_cmd = ['mpirun', '-n', '$nCores', '/opt/vasp5n/vasp.5.2/vasp'] jobs = [VaspJob(vasp_cmd, final=True, backup=False, suffix="", auto_npar=False, gzipped=False)] c = Custodian(handlers, jobs, max_errors=3) c.run() print("finish custodian_static.py")
def run_task(self, fw_spec):
    """Run this task's VASP jobs through Custodian and collect errors.

    Builds the MPI launch command from the environment, attaches it to every
    job in ``self.jobs``, runs Custodian with a vasprun.xml validator, then
    optionally gzips the output directory and returns a FWAction with the
    aggregated error list.

    :param fw_spec: Firework spec dict; reads ``_fw_env``, ``task_type``,
        ``mpsnl``, ``snlgroup_id``, ``run_tags`` and optional ``parameters``.
    :return: ``FWAction`` with ``stored_data['error_list']`` and an
        ``update_spec`` pointing at this run directory.
    :raises ValueError: if no MPI launcher is found, if no nproc env var is
        set, or if the INCAR fails ``check_incar``.
    """
    # write a file containing the formula and task_type for somewhat
    # easier file system browsing
    self._write_formula_file(fw_spec)

    # Prefer an explicitly configured MPI command; otherwise probe PATH.
    fw_env = fw_spec.get("_fw_env", {})
    if "mpi_cmd" in fw_env:
        mpi_cmd = fw_spec["_fw_env"]["mpi_cmd"]
    elif which("mpirun"):
        mpi_cmd = "mpirun"
    elif which("aprun"):
        mpi_cmd = "aprun"
    else:
        raise ValueError("No MPI command found!")

    # TODO: last two env vars, i.e. SGE and LoadLeveler, are untested
    env_vars = ['PBS_NP', 'SLURM_NTASKS', 'NSLOTS', 'LOADL_TOTAL_TASKS']
    # initialize so nproc is always bound, matching the sibling run_task
    nproc = None
    for env_var in env_vars:
        nproc = os.environ.get(env_var, None)
        if nproc is not None:
            break
    if nproc is None:
        raise ValueError("None of the env vars {} found to set nproc!".format(env_vars))

    v_exe = shlex.split('{} -n {} {}'.format(mpi_cmd, nproc,
                                             fw_env.get("vasp_cmd", "vasp")))
    gv_exe = shlex.split('{} -n {} {}'.format(mpi_cmd, nproc,
                                              fw_env.get("gvasp_cmd", "gvasp")))

    # was a Python 2 print statement; use the function form like the rest
    # of this file
    print('host:', os.environ['HOSTNAME'])

    for job in self.jobs:
        job.vasp_cmd = v_exe
        job.gamma_vasp_cmd = gv_exe

    # Fail fast on a bad INCAR before burning scheduler time.
    incar_errors = check_incar(fw_spec['task_type'])
    if incar_errors:
        raise ValueError("Critical error: INCAR does not pass checks: {}".format(incar_errors))

    logging.basicConfig(level=logging.DEBUG)
    c = Custodian(self.handlers, self.jobs, max_errors=self.max_errors,
                  gzipped_output=False, validators=[VasprunXMLValidator()])
    # manual gzip
    custodian_out = c.run()

    # Compress everything except already-gzipped files and scheduler
    # stdout/stderr spool files (.OU / .ER).
    if self.gzip_output:
        for f in os.listdir(os.getcwd()):
            if not f.lower().endswith("gz") and not f.endswith(".OU") and not f.endswith(".ER"):
                with open(f, 'rb') as f_in, \
                        GzipFile('{}.gz'.format(f), 'wb') as f_out:
                    f_out.writelines(f_in)
                os.remove(f)

    # Flatten every correction's error names across all custodian runs.
    all_errors = set()
    for run in custodian_out:
        for correction in run['corrections']:
            all_errors.update(correction['errors'])
    stored_data = {'error_list': list(all_errors)}
    update_spec = {'prev_vasp_dir': os.getcwd(),
                   'prev_task_type': fw_spec['task_type'],
                   'mpsnl': fw_spec['mpsnl'],
                   'snlgroup_id': fw_spec['snlgroup_id'],
                   'run_tags': fw_spec['run_tags'],
                   'parameters': fw_spec.get('parameters')}
    return FWAction(stored_data=stored_data, update_spec=update_spec)