def test_static_constructors(self):
    """Exercise each Kpoints factory classmethod and check style and mesh."""
    kp = Kpoints.gamma_automatic([3, 3, 3], [0, 0, 0])
    self.assertEqual(kp.style, Kpoints.supported_modes.Gamma)
    self.assertEqual(kp.kpts, [[3, 3, 3]])

    kp = Kpoints.monkhorst_automatic([2, 2, 2], [0, 0, 0])
    self.assertEqual(kp.style, Kpoints.supported_modes.Monkhorst)
    self.assertEqual(kp.kpts, [[2, 2, 2]])

    kp = Kpoints.automatic(100)
    self.assertEqual(kp.style, Kpoints.supported_modes.Automatic)
    self.assertEqual(kp.kpts, [[100]])

    # Density-based constructors need a real structure.
    pos = Poscar.from_file(os.path.join(test_dir, "POSCAR"))
    kp = Kpoints.automatic_density(pos.structure, 500)
    self.assertEqual(kp.kpts, [[2, 4, 4]])
    self.assertEqual(kp.style, Kpoints.supported_modes.Monkhorst)

    kp = Kpoints.automatic_density(pos.structure, 500, True)
    self.assertEqual(kp.style, Kpoints.supported_modes.Gamma)

    kp = Kpoints.automatic_density_by_vol(pos.structure, 1000)
    self.assertEqual(kp.kpts, [[6, 11, 13]])
    self.assertEqual(kp.style, Kpoints.supported_modes.Gamma)

    # A large supercell should collapse to the gamma-only mesh.
    big = pos.structure
    big.make_supercell(3)
    kp = Kpoints.automatic_density(big, 500)
    self.assertEqual(kp.kpts, [[1, 1, 1]])
    self.assertEqual(kp.style, Kpoints.supported_modes.Gamma)
def test_static_constructors(self):
    """Exercise the Kpoints factory classmethods, including from_string."""
    kp = Kpoints.gamma_automatic([3, 3, 3], [0, 0, 0])
    self.assertEqual(kp.style, Kpoints.supported_modes.Gamma)
    self.assertEqual(kp.kpts, [[3, 3, 3]])

    kp = Kpoints.monkhorst_automatic([2, 2, 2], [0, 0, 0])
    self.assertEqual(kp.style, Kpoints.supported_modes.Monkhorst)
    self.assertEqual(kp.kpts, [[2, 2, 2]])

    kp = Kpoints.automatic(100)
    self.assertEqual(kp.style, Kpoints.supported_modes.Automatic)
    self.assertEqual(kp.kpts, [[100]])

    # Density-based constructors need a real structure.
    poscar = Poscar.from_file(self.TEST_FILES_DIR / 'POSCAR')
    kp = Kpoints.automatic_density(poscar.structure, 500)
    self.assertEqual(kp.kpts, [[1, 3, 3]])
    self.assertEqual(kp.style, Kpoints.supported_modes.Gamma)

    kp = Kpoints.automatic_density(poscar.structure, 500, True)
    self.assertEqual(kp.style, Kpoints.supported_modes.Gamma)

    kp = Kpoints.automatic_density_by_vol(poscar.structure, 1000)
    self.assertEqual(kp.kpts, [[6, 10, 13]])
    self.assertEqual(kp.style, Kpoints.supported_modes.Gamma)

    # A large supercell should collapse to the gamma-only mesh.
    big = poscar.structure
    big.make_supercell(3)
    kp = Kpoints.automatic_density(big, 500)
    self.assertEqual(kp.kpts, [[1, 1, 1]])
    self.assertEqual(kp.style, Kpoints.supported_modes.Gamma)

    # Shift must round-trip through the text representation.
    kp = Kpoints.from_string("""k-point mesh
0
G
10 10 10
0.5 0.5 0.5
""")
    self.assertArrayAlmostEqual(kp.kpts_shift, [0.5, 0.5, 0.5])
def test_static_constructors(self):
    """Check every static Kpoints constructor against known meshes."""
    for ctor, expected_style, expected_kpts in (
        (lambda: Kpoints.gamma_automatic([3, 3, 3], [0, 0, 0]),
         Kpoints.supported_modes.Gamma, [[3, 3, 3]]),
        (lambda: Kpoints.monkhorst_automatic([2, 2, 2], [0, 0, 0]),
         Kpoints.supported_modes.Monkhorst, [[2, 2, 2]]),
        (lambda: Kpoints.automatic(100),
         Kpoints.supported_modes.Automatic, [[100]]),
    ):
        kp = ctor()
        self.assertEqual(kp.style, expected_style)
        self.assertEqual(kp.kpts, expected_kpts)

    # Density-based constructors need a real structure.
    poscar = Poscar.from_file(os.path.join(test_dir, 'POSCAR'))
    kp = Kpoints.automatic_density(poscar.structure, 500)
    self.assertEqual(kp.kpts, [[2, 4, 4]])
    self.assertEqual(kp.style, Kpoints.supported_modes.Monkhorst)

    kp = Kpoints.automatic_density(poscar.structure, 500, True)
    self.assertEqual(kp.style, Kpoints.supported_modes.Gamma)

    kp = Kpoints.automatic_density_by_vol(poscar.structure, 1000)
    self.assertEqual(kp.kpts, [[6, 11, 13]])
    self.assertEqual(kp.style, Kpoints.supported_modes.Gamma)

    # A large supercell should collapse to the gamma-only mesh.
    scaled = poscar.structure
    scaled.make_supercell(3)
    kp = Kpoints.automatic_density(scaled, 500)
    self.assertEqual(kp.kpts, [[1, 1, 1]])
    self.assertEqual(kp.style, Kpoints.supported_modes.Gamma)
def get_kpoints_object(self, step, structure):
    """Build a Kpoints object for *step* from the stored k-point tags.

    Returns None when no k-point specification exists for the step;
    exits the process on an unrecognized generation type.
    """
    try:
        tags = self.kpoints[step]
    except KeyError:
        # No k-point settings registered for this step.
        return None
    kind = tags['Type']
    if kind == 'automatic_density':
        return Kpoints.automatic_density(structure, tags['Grid Density'],
                                         tags['Force Gamma'])
    if kind == 'automatic_density_by_vol':
        return Kpoints.automatic_density_by_vol(
            structure,
            tags['Grid Density per A^(-3) of Reciprocal Cell'],
            tags['Force Gamma'])
    if kind == 'automatic_gamma_density':
        return Kpoints.automatic_gamma_density(structure,
                                               tags['Grid Density'])
    if kind == 'gamma_automatic':
        return Kpoints.gamma_automatic(tags["KPTS"], tags["Shift"])
    if kind == 'monkhorst_automatic':
        return Kpoints.monkhorst_automatic(tags["KPTS"], tags["Shift"])
    print('Invalid kpoints generation type %s; fatal error' % kind)
    sys.exit(1)
def test_static_constructors(self):
    """Verify the Kpoints factory classmethods and text round-trip."""
    kp = Kpoints.gamma_automatic([3, 3, 3], [0, 0, 0])
    self.assertEqual(kp.style, Kpoints.supported_modes.Gamma)
    self.assertEqual(kp.kpts, [[3, 3, 3]])

    kp = Kpoints.monkhorst_automatic([2, 2, 2], [0, 0, 0])
    self.assertEqual(kp.style, Kpoints.supported_modes.Monkhorst)
    self.assertEqual(kp.kpts, [[2, 2, 2]])

    kp = Kpoints.automatic(100)
    self.assertEqual(kp.style, Kpoints.supported_modes.Automatic)
    self.assertEqual(kp.kpts, [[100]])

    # Density-based constructors need a real structure.
    poscar = Poscar.from_file(os.path.join(test_dir, 'POSCAR'))
    kp = Kpoints.automatic_density(poscar.structure, 500)
    self.assertEqual(kp.kpts, [[1, 3, 3]])
    self.assertEqual(kp.style, Kpoints.supported_modes.Gamma)

    kp = Kpoints.automatic_density(poscar.structure, 500, True)
    self.assertEqual(kp.style, Kpoints.supported_modes.Gamma)

    kp = Kpoints.automatic_density_by_vol(poscar.structure, 1000)
    self.assertEqual(kp.kpts, [[6, 10, 13]])
    self.assertEqual(kp.style, Kpoints.supported_modes.Gamma)

    # A large supercell should collapse to the gamma-only mesh.
    scaled = poscar.structure
    scaled.make_supercell(3)
    kp = Kpoints.automatic_density(scaled, 500)
    self.assertEqual(kp.kpts, [[1, 1, 1]])
    self.assertEqual(kp.style, Kpoints.supported_modes.Gamma)

    # Shift must round-trip through the text representation.
    kp = Kpoints.from_string("""k-point mesh
0
G
10 10 10
0.5 0.5 0.5
""")
    self.assertArrayAlmostEqual(kp.kpts_shift, [0.5, 0.5, 0.5])
def Auto_Kpoints(mat=None, length=20):
    """
    Get a gamma-centered Kpoints object from structure and line-density.

    Args:
        mat: Poscar object with structure information
        length: line-density

    Returns:
        kpp: Kpoints object
    """
    recip = mat.structure.lattice.reciprocal_lattice_crystallographic.matrix
    # One subdivision count per reciprocal axis, at least 1, rounded to
    # the nearest integer via the +0.5 truncation trick.
    divisions = tuple(
        int(max(1, length * LA.norm(np.array(recip[axis])) + 0.5))
        for axis in range(3)
    )
    return Kpoints.gamma_automatic(kpts=divisions)
def kpoints(self):
    """
    Sets up the k-points for the static calculation.

    Returns:
        :class: pymatgen.io.vasp.inputs.Kpoints
    """
    settings = self.user_kpoints_settings or self._config_dict["KPOINTS"]
    if "k_resolution" in settings:
        # Use k_resolution to calculate kpoints: divide each reciprocal
        # lattice length by the resolution, rounding up so the mesh is
        # at least that fine.
        resolution = settings["k_resolution"]
        divisions = [
            int(np.ceil(length / resolution))
            for length in self.structure.lattice.reciprocal_lattice.lengths
        ]
        return Kpoints.gamma_automatic(kpts=divisions)
    if "gamma_density" in settings:
        return Kpoints.automatic_density_by_vol(
            self.structure, int(settings['gamma_density']), force_gamma=True)
    return super().kpoints
def get_kpoints(self, structure):
    """
    Writes out a KPOINTS file using the automated gamma grid method.
    VASP crashes GW calculations on none gamma centered meshes.
    """
    if self.sort_structure:
        structure = structure.get_sorted_structure()
    density = int(self.kpoints_settings['grid_density'])
    # A density of one means the bare gamma-point mesh.
    if density == 1:
        return Kpoints.gamma_automatic()
    return Kpoints.automatic_gamma_density(structure, density)
def kpoints(self):
    """Return k-points, preferring the scheme used by the previous run."""
    self._config_dict["KPOINTS"]["grid_density"] = self.grid_density
    kpoints = super(PRLRoughStaticSet, self).kpoints
    # Prefer to use k-point scheme from previous run
    if self.prev_kpoints and self.prev_kpoints.style != kpoints.style:
        if self.prev_kpoints.style == Kpoints.supported_modes.Monkhorst:
            # Bump odd subdivisions to the next even number for the
            # Monkhorst mesh.
            even_divisions = [kp if kp % 2 == 0 else kp + 1
                              for kp in kpoints.kpts[0]]
            kpoints = Kpoints.monkhorst_automatic(even_divisions)
        else:
            kpoints = Kpoints.gamma_automatic(kpoints.kpts[0])
    return kpoints
def set_kpoints(self, kpoint):
    """Set self.kpoints according to the configured grid type."""
    grid = self.Grid_type
    if grid == 'M':
        self.kpoints = Kpoints.monkhorst_automatic(kpts=kpoint)
    elif grid == 'A':
        self.kpoints = Kpoints.automatic(subdivisions=kpoint)
    elif grid == 'G':
        self.kpoints = Kpoints.gamma_automatic(kpts=kpoint)
    elif grid == '3DD':
        # Volume-based density: kpoint is the k-points per A^-3.
        self.kpoints = Kpoints.automatic_density_by_vol(
            structure=self.poscar.structure, kppvol=kpoint)
    elif grid == 'band':
        # Line mode along the high-symmetry path of the structure.
        self.kpoints = Kpoints.automatic_linemode(
            divisions=kpoint, ibz=HighSymmKpath(self.poscar.structure))
def set_kpoints(self, kpoint):
    """Set the k-point mesh for the configured grid type."""
    builders = {
        'M': lambda: Kpoints.monkhorst_automatic(kpts=kpoint),
        'A': lambda: Kpoints.automatic(subdivisions=kpoint),
        'G': lambda: Kpoints.gamma_automatic(kpts=kpoint),
        '3DD': lambda: Kpoints.automatic_density_by_vol(
            structure=self.poscar.structure, kppvol=kpoint),
        'band': lambda: Kpoints.automatic_linemode(
            divisions=kpoint,
            ibz=HighSymmKpath(self.poscar.structure)),
    }
    # Unknown grid types leave self.kpoints untouched, matching the
    # original if/elif chain.
    if self.Grid_type in builders:
        self.kpoints = builders[self.Grid_type]()
def set_kpoints(self, kpoint):
    """
    Set self.kpoints for the configured grid type and return the job
    directory corresponding to this k-point setting.

    Args:
        kpoint: mesh tuple, subdivision count or line-mode divisions,
            interpreted according to self.Grid_type.

    Returns:
        str: path of the job directory for this KPOINTS setting.
    """
    if self.Grid_type == 'M':
        self.kpoints = Kpoints.monkhorst_automatic(kpts=kpoint)
    elif self.Grid_type == 'A':
        self.kpoints = Kpoints.automatic(subdivisions=kpoint)
    elif self.Grid_type == 'G':
        self.kpoints = Kpoints.gamma_automatic(kpts=kpoint)
    elif self.Grid_type == '3DD':
        # Volume-based density: kpoint is the k-points per A^-3.
        self.kpoints = Kpoints.automatic_density_by_vol(
            structure=self.poscar.structure, kppvol=kpoint)
    elif self.Grid_type == 'band':
        # Line mode along the high-symmetry path of the structure.
        self.kpoints = Kpoints.automatic_linemode(
            divisions=kpoint, ibz=HighSymmKpath(self.poscar.structure))
    name = self.kpoint_to_name(kpoint, self.Grid_type)
    # os.path.join instead of manual "+ os.sep +" concatenation.
    job_dir = os.path.join(self.job_dir, self.key_to_name('KPOINTS'), name)
    return job_dir
def set_kpoints(self, kpoint):
    """Set the k-point mesh for the grid type; return the job directory."""
    grid = self.Grid_type
    if grid == 'M':
        self.kpoints = Kpoints.monkhorst_automatic(kpts=kpoint)
    elif grid == 'A':
        self.kpoints = Kpoints.automatic(subdivisions=kpoint)
    elif grid == 'G':
        self.kpoints = Kpoints.gamma_automatic(kpts=kpoint)
    elif grid == '3DD':
        # Volume-based density: kpoint is the k-points per A^-3.
        self.kpoints = Kpoints.automatic_density_by_vol(
            structure=self.poscar.structure, kppvol=kpoint)
    elif grid == 'band':
        # Line mode along the high-symmetry path of the structure.
        self.kpoints = Kpoints.automatic_linemode(
            divisions=kpoint, ibz=HighSymmKpath(self.poscar.structure))
    mesh_name = self.kpoint_to_name(kpoint, grid)
    return (self.job_dir + os.sep + self.key_to_name('KPOINTS')
            + os.sep + mesh_name)
def kpoints(self):
    """
    Sets up the k-points for the static calculation.

    Returns:
        :class: pymatgen.io.vasp.inputs.Kpoints
    """
    import math

    settings = self.user_kpoints_settings or self._config_dict["KPOINTS"]
    if "k_resolution" in settings:
        # Use k_resolution to calculate kpoints.  math.ceil replaces the
        # original round(l / res + 0.5): Python 3's round() uses
        # banker's rounding, so round(n + 0.5) stays at n for even
        # integers but jumps to n + 1 for odd ones whenever the length
        # is an exact multiple of the resolution.  Ceiling gives the
        # intended "at least this fine" mesh consistently.
        kpt_divisions = [
            math.ceil(l / settings["k_resolution"])
            for l in self.structure.lattice.reciprocal_lattice.lengths
        ]
        return Kpoints.gamma_automatic(kpts=kpt_divisions)
    else:
        return super().kpoints
def gamma_list(cls):
    """
    Initialize gamma grid from a list explicitly defining the number of
    kpoint subdivisions along the crystal axis

    Example::

        Automatic Kpoint Scheme
        0
        Gamma
        5 5 5

    Returns:
        Kpoints: gamma-centered mesh built from the stored parameters.

    Raises:
        KpointWrapperError: if the k-point list or shift does not have
            exactly three entries.
    """
    if cls.kpoint_params['sympath'] is not None:
        warnings.warn("Explicit gamma grid mode: Ignoring defined high "
                      "symmetry path object")
    kpoints = cls.kpoint_params['kpoints']
    if len(kpoints) != 3:
        # Fixed typo in the user-facing message ("explict" -> "explicit").
        raise KpointWrapperError("Expected list of length 3 for explicit "
                                 "k-point grid input")
    # Default to an unshifted mesh when no shift is configured.
    shift = cls.kpoint_params['shift'] or [.0, .0, .0]
    if len(shift) != 3:
        raise KpointWrapperError("Expected list of length 3 for k-point "
                                 "grid shift")
    return Kpoints.gamma_automatic(kpts=kpoints, shift=shift)
def set_kpoints(self, kpoint=None, poscar=None, ibzkpth=None):
    """
    Set self.kpoints according to self.Grid_type.

    Args:
        kpoint: mesh / subdivisions / line-divisions value, interpreted
            according to the grid type; may be omitted for database runs
            whose densities are hard-wired below.
        poscar: Poscar supplying the structure; defaults to self.poscar.
        ibzkpth: path to an IBZKPT file, used only by the
            'hse_bands_2D' grid type.
    """
    # useful to check if a poscar is supplied from setup_poscar_jobs
    # (most often the case) or this is a single poscar use case
    if not poscar:
        poscar = self.poscar
    # splitting into two if/elif branches means fewer if statements to
    # check on a run.
    # Most general method of setting the k-points for different grid
    # types.
    # NOTE: requires that at least one k-points value be passed as a
    # turn-knobs list value; this is not true for values that may be
    # calculated out of a database.
    # use this part only if this is a non-database run, for example for
    # k-points calibration
    if not self.database:
        if self.Grid_type == 'M':
            self.kpoints = Kpoints.monkhorst_automatic(kpts=kpoint)
        elif self.Grid_type == 'A':
            self.kpoints = Kpoints.automatic(subdivisions=kpoint)
        elif self.Grid_type == 'G':
            self.kpoints = Kpoints.gamma_automatic(kpts=kpoint)
        elif self.Grid_type == '3D_vol':
            self.kpoints = Kpoints.automatic_density_by_vol(
                structure=poscar.structure, kppvol=kpoint)
        elif self.Grid_type == 'bulk_bands_pbe':
            self.kpoints = Kpoints.automatic_linemode(
                divisions=kpoint, ibz=HighSymmKpath(poscar.structure))
        elif self.Grid_type == 'D':
            self.kpoints = Kpoints.automatic_density(
                structure=poscar.structure, kppa=kpoint)
        elif self.Grid_type == 'Finer_G_Mesh':
            # kpoint is the scaling factor and self.kpoints is the old
            # kpoint mesh
            self.logger.info('Setting Finer G Mesh for {0} by scale {1}'.format(kpoint, self.finer_kpoint))
            self.kpoints = Kpoints.gamma_automatic(
                kpts=[i * self.finer_kpoint for i in kpoint])
            self.logger.info('Finished scaling operation of k-mesh')
    # applicable for database runs; future constructs or settings can be
    # activated via a yaml file -- the database yaml file, or better
    # still the input deck from its specification, decides what
    # combination of input calibrate constructor settings to use, one of
    # them being the grid_type tag
    elif self.database == 'twod':
        # set of kpoints settings according to the 2D database profile;
        # the actual settings of k-points density will in future come
        # from any database input file set
        if self.Grid_type == 'hse_bands_2D_prep':
            kpoint_dict = Kpoints.automatic_gamma_density(
                poscar.structure, 200).as_dict()
            kpoint_dict['kpoints'][0][2] = 1  # remove z kpoints
            self.kpoints = Kpoints.from_dict(kpoint_dict)
        elif self.Grid_type == 'hse_bands_2D':
            # can at most return the path to the correct kpoints file:
            # needs kpoints to be written out in instrument in a
            # different way, not using the Kpoints object
            self.kpoints = get_2D_hse_kpoints(poscar.structure, ibzkpth)
        elif self.Grid_type == 'bands_2D':
            kpoint_dict = Kpoints.automatic_linemode(
                divisions=20, ibz=HighSymmKpath(poscar.structure)).as_dict()
            self.kpoints = Kpoints.from_dict(kpoint_dict)
        elif self.Grid_type == 'relax_2D':
            # general relaxation settings for 2D
            kpoint_dict = Kpoints.automatic_gamma_density(
                poscar.structure, 1000).as_dict()
            kpoint_dict['kpoints'][0][2] = 1
            self.kpoints = Kpoints.from_dict(kpoint_dict)
        elif self.Grid_type == 'relax_3D':
            # general relaxation settings for 3D
            # BUGFIX: the original passed the Kpoints object itself to
            # Kpoints.from_dict; convert with .as_dict() first, as every
            # sibling branch does.
            kpoint_dict = Kpoints.automatic_gamma_density(
                poscar.structure, 1000).as_dict()
            self.kpoints = Kpoints.from_dict(kpoint_dict)
def get_jobs(args):
    """
    Returns a generator of jobs. Allows of "infinite" jobs.

    Each entry in args.jobs becomes one VaspJob (except "full_relax",
    which yields the jobs produced by VaspJob.full_opt_run).  The
    settings_override list for each job is built from the job type;
    post_settings carries settings forward to the *next* job.
    """
    vasp_command = args.command.split()
    # save initial INCAR for rampU runs
    n_ramp_u = args.jobs.count("rampU")
    ramps = 0
    if n_ramp_u:
        incar = Incar.from_file("INCAR")
        ldauu = incar["LDAUU"]
        ldauj = incar["LDAUJ"]
    njobs = len(args.jobs)
    post_settings = []  # append to this list to have settings applied on next job
    for i, job in enumerate(args.jobs):
        final = i == njobs - 1
        # Jobs containing a digit are assumed to carry their own ordinal.
        if any(c.isdigit() for c in job):
            suffix = "." + job
        else:
            suffix = f".{job}{i + 1}"
        settings = post_settings
        post_settings = []
        backup = i == 0
        copy_magmom = False
        vinput = VaspInput.from_directory(".")
        if i > 0:
            # Continue from the relaxed geometry of the previous job.
            settings.append({"file": "CONTCAR", "action": {"_file_copy": {"dest": "POSCAR"}}})
        job_type = job.lower()
        auto_npar = True
        if args.no_auto_npar:
            auto_npar = False
        if job_type.startswith("static_derived"):
            from pymatgen.io.vasp.sets import MPStaticSet

            vis = MPStaticSet.from_prev_calc(
                ".",
                user_incar_settings={"LWAVE": True, "EDIFF": 1e-6},
                ediff_per_atom=False,
            )
            settings.extend(
                [
                    {"dict": "INCAR", "action": {"_set": dict(vis.incar)}},
                    {"dict": "KPOINTS", "action": {"_set": vis.kpoints.as_dict()}},
                ]
            )
        if job_type.startswith("static_dielectric_derived"):
            from pymatgen.io.vasp.sets import (
                MPStaticDielectricDFPTVaspInputSet,
                MPStaticSet,
            )

            # vis = MPStaticSet.from_prev_calc(
            #     ".", user_incar_settings={"EDIFF": 1e-6, "IBRION": 8,
            #                              "LEPSILON": True, 'LREAL':False,
            #                              "LPEAD": True, "ISMEAR": 0,
            #                              "SIGMA": 0.01},
            #     ediff_per_atom=False)
            vis = MPStaticDielectricDFPTVaspInputSet()
            incar = vis.get_incar(vinput["POSCAR"].structure)
            # Drop tags that conflict with a DFPT dielectric run, and
            # remember which ones must be unset in the on-disk INCAR.
            unset = {}
            for k in ["NPAR", "KPOINT_BSE", "LAECHG", "LCHARG", "LVHAR", "NSW"]:
                incar.pop(k, None)
                if k in vinput["INCAR"]:
                    unset[k] = 1
            kpoints = vis.get_kpoints(vinput["POSCAR"].structure)
            settings.extend(
                [
                    {"dict": "INCAR", "action": {"_set": dict(incar), "_unset": unset}},
                    {"dict": "KPOINTS", "action": {"_set": kpoints.as_dict()}},
                ]
            )
            auto_npar = False
        elif job_type.startswith("static"):
            # Scale the existing mesh by the requested factor.  NOTE:
            # the comprehension variable shadows the loop index i, but
            # comprehension scope keeps the outer i intact.
            m = [i * args.static_kpoint for i in vinput["KPOINTS"].kpts[0]]
            settings.extend(
                [
                    {"dict": "INCAR", "action": {"_set": {"NSW": 0}}},
                    {"dict": "KPOINTS", "action": {"_set": {"kpoints": [m]}}},
                ]
            )
        elif job_type.startswith("nonscf_derived"):
            from pymatgen.io.vasp.sets import MPNonSCFSet

            vis = MPNonSCFSet.from_prev_calc(".", copy_chgcar=False, user_incar_settings={"LWAVE": True})
            settings.extend(
                [
                    {"dict": "INCAR", "action": {"_set": dict(vis.incar)}},
                    {"dict": "KPOINTS", "action": {"_set": vis.kpoints.as_dict()}},
                ]
            )
        elif job_type.startswith("optics_derived"):
            from pymatgen.io.vasp.sets import MPNonSCFSet

            vis = MPNonSCFSet.from_prev_calc(
                ".",
                optics=True,
                copy_chgcar=False,
                nedos=2001,
                mode="uniform",
                nbands_factor=5,
                user_incar_settings={
                    "LWAVE": True,
                    "ALGO": "Exact",
                    "SIGMA": 0.01,
                    "EDIFF": 1e-6,
                },
                ediff_per_atom=False,
            )
            settings.extend(
                [
                    {"dict": "INCAR", "action": {"_set": dict(vis.incar)}},
                    {"dict": "KPOINTS", "action": {"_set": vis.kpoints.as_dict()}},
                ]
            )
        elif job_type.startswith("rampu"):
            # Ramp LDAU parameters linearly from 0 to their full value
            # across the rampU jobs.
            f = ramps / (n_ramp_u - 1)
            settings.append(
                {
                    "dict": "INCAR",
                    "action": {
                        "_set": {
                            "LDAUJ": [j * f for j in ldauj],
                            "LDAUU": [u * f for u in ldauu],
                        }
                    },
                }
            )
            copy_magmom = True
            ramps += 1
        elif job_type.startswith("quick_relax") or job_type.startswith("quickrelax"):
            kpoints = vinput["KPOINTS"]
            incar = vinput["INCAR"]
            structure = vinput["POSCAR"].structure
            # Restore the original ISMEAR/KPOINTS on the next job.
            if "ISMEAR" in incar:
                post_settings.append({"dict": "INCAR", "action": {"_set": {"ISMEAR": incar["ISMEAR"]}}})
            else:
                post_settings.append({"dict": "INCAR", "action": {"_unset": {"ISMEAR": 1}}})
            post_settings.append({"dict": "KPOINTS", "action": {"_set": kpoints.as_dict()}})
            # lattice vectors with length < 9 will get >1 KPOINT
            low_kpoints = Kpoints.gamma_automatic([max(int(18 / l), 1) for l in structure.lattice.abc])
            settings.extend(
                [
                    {"dict": "INCAR", "action": {"_set": {"ISMEAR": 0}}},
                    {"dict": "KPOINTS", "action": {"_set": low_kpoints.as_dict()}},
                ]
            )
            # let vasp determine encut (will be lower than
            # needed for compatibility with other runs)
            if "ENCUT" in incar:
                post_settings.append({"dict": "INCAR", "action": {"_set": {"ENCUT": incar["ENCUT"]}}})
                settings.append({"dict": "INCAR", "action": {"_unset": {"ENCUT": 1}}})
        elif job_type.startswith("relax"):
            pass
        elif job_type.startswith("full_relax"):
            yield from VaspJob.full_opt_run(vasp_command)
        else:
            print(f"Unsupported job type: {job}")
            sys.exit(-1)
        if not job_type.startswith("full_relax"):
            yield VaspJob(
                vasp_command,
                final=final,
                suffix=suffix,
                backup=backup,
                settings_override=settings,
                copy_magmom=copy_magmom,
                auto_npar=auto_npar,
            )
def test_pickle(self):
    """A default Kpoints object must survive pickling."""
    kpts = Kpoints.gamma_automatic()
    pickle.dumps(kpts)
def process_input(args):
    """
    Dispatch on args.command and run the corresponding project action:
    start/continue/check/rerun/analyze/archive a workflow project, load
    user settings, or query/cancel queued jobs.  args.i is the primary
    input file (a project or report yaml) for every sub-command.
    """
    if args.command == 'start_project':
        if args.i:
            f = open(args.i)
            my_project = yaml.load(open(args.i))  ## this will be the only CLI input
            f.close()
            NAME = my_project['NAME']
            INCAR_GENERAL = my_project['Incar_General']
            POTCAR_SPEC = yaml.load(open(my_project['Potcar_Spec']))
            MATERIALS_LIST = my_project['Insilico_Fab']['Material_List']
            # Structures come either from local .vasp files or from the
            # Materials Project, depending on the material list entries.
            struct_list = [Poscar.from_file(poscar)
                           for poscar in glob('StructsDir/*.vasp')
                           if 'StructsDir' in MATERIALS_LIST] + \
                          [Poscar(get_struct_from_mp(p))
                           for p in MATERIALS_LIST
                           if 'StructsDir' not in p]
            WORKFLOWS = my_project['Workflow']
            project_log = get_logger(NAME + "_InSilico_Materials")
            error_handler = [VaspErrorHandler()]
            Order_WfNames = list(np.sort(list(WORKFLOWS['Steps'].keys())))
            steps_map = {'StepVASP0': StepVASP0, 'StepVASP1': StepVASP1}
            steps_dict = {k: WORKFLOWS['Steps'][k]['TYPE']
                          for k in Order_WfNames}
            # Launch the first workflow step only.
            steps_map[steps_dict[list(steps_dict.keys())[0]]](my_project,
                                                              struct_list)
            project_abs_dir = os.path.abspath(os.path.curdir)
            my_project['Project_Dir'] = project_abs_dir
            my_project['Running_Wflow'] = [int(Order_WfNames[0])]
            # Record progress back into the project yaml.
            with open(args.i, 'w') as projfile:
                yaml.dump(my_project, projfile, default_flow_style=False)
            if os.path.exists('custodian.json'):
                os.remove('custodian.json')
            projfile.close()
    if args.command == 'continue_project':
        if args.i:
            f = open(args.i)
            my_project = yaml.load(open(args.i))  ## this will be the only CLI input
            f.close()
            NAME = my_project['NAME']
            WORKFLOWS = my_project['Workflow']
            #error_handler = [VaspErrorHandler()]
            Order_WfNames = list(np.sort(list(WORKFLOWS['Steps'].keys())))
            steps_map = {'StepVASP0': StepVASP0, 'StepVASP1': StepVASP1}
            steps_dict = {k: WORKFLOWS['Steps'][k]['TYPE']
                          for k in Order_WfNames}
            # Run the first step not yet marked as running, then stop.
            for k in Order_WfNames:
                if k not in my_project['Running_Wflow']:
                    #print (k)
                    #print (steps_map[steps_dict[list(steps_dict.keys())[k]]])
                    steps_map[steps_dict[list(steps_dict.keys())[k]]](
                        my_project, k)
                    #print ('Here')
                    orig_done = my_project['Running_Wflow']
                    orig_done.append(k)
                    my_project['Running_Wflow'] = [int(o) for o in orig_done]
                    with open(args.i, 'w') as projfile:
                        yaml.dump(my_project, projfile,
                                  default_flow_style=False)
                    if os.path.exists('custodian.json'):
                        os.remove('custodian.json')
                    projfile.close()
                    break
    if args.command == 'check_project':
        # check if any input spec for the project
        if args.i:
            f = open(args.i)
            project_spec = yaml.load(f)
            if args.c:
                workflow_chkpts = [args.c]
            else:
                workflow_chkpts = glob('{}*.json'.format(project_spec['NAME']))
            #print (workflow_chkpts)
            proj_dir = project_spec['Project_Dir']
            os.chdir(proj_dir)
            # One custodian error report per checkpoint file.
            CustodianChecks = \
                {chk: check_errors(chkfile=chk,
                                   logfile_name='Custodian_' + project_spec['NAME'])
                 for chk in workflow_chkpts}
            with open('{}_CustodianReport.yaml'.format(project_spec['NAME']),
                      'w') as report:
                yaml.dump(CustodianChecks, report, default_flow_style=False)
            report.close()
    elif args.command == 'rerun_project':
        # Custodian yamls are input
        print(args.i, len(args.i))
        if args.i:
            f = open(args.i)
            rerun_logs = get_logger('{}_reruns'.format(
                args.i.replace('.yaml', '')))
            rerun_spec = yaml.load(f)
            proj_dir = os.path.abspath(os.path.curdir)
            if args.c:
                rerun_chkpts = [args.c]
            else:
                rerun_chkpts = list(rerun_spec.keys())
            print(rerun_chkpts)
            for k in rerun_chkpts:
                for case in rerun_spec[k]:
                    print('Rerunning {}'.format(case['ErrorDir'][0]))
                    rerun_logs.info('Rerunning {}'.format(case['ErrorDir'][0]))
                    if args.s:
                        # User supplied a replacement submit file.
                        rerun_logs.info(
                            'Using new submit_file {} for all rerun'.format(
                                args.s))
                        os.system('cp {0} {1}'.format(args.s,
                                                      case['ErrorDir'][0]))
                        submit_cmd = ['sbatch', args.s]
                    else:
                        # Patch the existing submit script based on the
                        # recorded error type.
                        if case['Error'] == ['Memory Error']:
                            if args.m:
                                rerun_logs.info(
                                    'Error Memory adding {}'.format(args.m))
                                add_mem_submit_file(
                                    case['ErrorDir'][0] + '/submit_script',
                                    args.m)
                            else:
                                rerun_logs.info('Error Memory adding 3000')
                                add_mem_submit_file(
                                    case['ErrorDir'][0] + '/submit_script',
                                    3000)
                        elif 'TIME LIMIT' in case['Error'][0]:
                            if args.w:
                                rerun_logs.info(
                                    'Error TIME LIMIT adding {}'.format(
                                        args.w))
                                add_walltime(
                                    case['ErrorDir'][0] + '/submit_script',
                                    args.w)
                            else:
                                rerun_logs.info(
                                    'Error TIME LIMIT adding 20:00:00')
                                add_walltime(
                                    case['ErrorDir'][0] + '/submit_script',
                                    '20:00:00')
                        submit_cmd = ['sbatch', 'submit_script']
                    os.chdir(case['ErrorDir'][0])
                    # Optional INCAR / KPOINTS edits before resubmission.
                    if args.inc:
                        incar = Incar.from_file('INCAR')
                        user_dict = ast.literal_eval(args.inc)
                        incar.update(user_dict)
                        incar.write_file('INCAR')
                    if args.dinc:
                        incar = Incar.from_file('INCAR')
                        user_exp = ast.literal_eval(args.dinc)
                        for d in user_exp:
                            if d in list(incar.keys()):
                                del incar[d]
                        incar.write_file('INCAR')
                    if args.kpt:
                        user_exp = ast.literal_eval(args.kpt)
                        if isinstance(user_exp, tuple):
                            kpoints = Kpoints.gamma_automatic(user_exp)
                        else:
                            struct = Structure.from_file('POSCAR')
                            kpoints = Kpoints.automatic_gamma_density(
                                struct, user_exp)
                        kpoints.write_file('KPOINTS')
                    # NOTE(review): submit_cmd is computed above but the
                    # Popen below always runs ['sbatch', 'submit_script']
                    # -- confirm whether args.s reruns should use args.s.
                    p = subprocess.Popen(['sbatch', 'submit_script'],
                                         stdout=subprocess.PIPE,
                                         stderr=subprocess.PIPE)
                    stdout, stderr = p.communicate()
                    rerun_job_id = str(stdout).rstrip(
                        '\n').split()[-1].replace("\\n'", '')
                    rerun_logs.info('running job {0} in {1}'.format(
                        rerun_job_id, case['ErrorDir'][0]))
                    os.chdir(proj_dir)
            rerun_logs.info('Finished submitting reruns')
            print('Finished submitting reruns')
    elif args.command == 'analyze_project':
        # check for yaml analysis input for project
        if args.i:
            f = open(args.i)
            proj_spec = yaml.load(f)
            proj_wflow_st = proj_spec['Workflow']['Steps']
            for step in proj_wflow_st:
                print(step)
                if 'Analysis' in list(proj_wflow_st[step].keys()):
                    analyze_script = proj_wflow_st[step]['Analysis']['Script']
                    analyze_input = proj_wflow_st[step]['Analysis']['Input']
                    analyze_output = proj_wflow_st[step]['Analysis']['Output']
                    if '.py' in analyze_script:
                        # os.system('python {0} -i {1} -o {2}'.format(analyze_script, analyze_input, analyze_output))
                        print(analyze_script, analyze_input, analyze_output)
                        p = subprocess.Popen(['python', analyze_script,
                                              '-i', analyze_input,
                                              '-o', analyze_output],
                                             stdout=subprocess.PIPE,
                                             stderr=subprocess.PIPE)
                        stdout, stderr = p.communicate()
                        print(stdout)
            print(
                'Analyzed the project according to specified post processing script'
            )
    elif args.command == 'archive_project':
        # check for workflow.yaml input file
        if args.i:
            print(
                'tar.gz the project json files and csv and vasprun.xml files from the project directory'
            )
            f = open(args.i)
            proj_spec = yaml.load(f)
            name_spec = proj_spec['NAME']
            proj_dir = proj_spec['Project_Dir']
            os.chdir(proj_dir)
            # first the json checkpoints
            os.system('tar cvzf {}.tar.gz {}*.json '.format(
                name_spec, name_spec))
            # then add all vaspruns to tar archive
            # NOTE(review): mixing auto '{}' and indexed '{0}' fields in
            # one str.format call raises ValueError -- these find/tar
            # commands look broken as written; verify before relying on
            # this branch.
            os.system(
                'find . -iname "*.xml" -exec tar -rvf {0}.tar {} \;'.format(
                    name_spec + '_vaspruns_csvs'))
            # then add csvs
            os.system(
                'find . -iname "*.csv" -exec tar -rvf {0}.tar {} \;'.format(
                    name_spec + '_vaspruns_csvs'))
            # compress the archive
            # NOTE(review): two '{}' placeholders but a single argument
            # -- confirm the intended command.
            os.system('tar cvzf {}.tar.gz {}.tar'.format(name_spec +
                                                         '_vaspruns_csvs'))
            # finally delete WAVECARS and CHG, CHGCARS
            os.system('find . -iname "WAVECAR" -exec rm {} \;')
            os.system('find . -iname "CHG*" -exec rm {} \;')
    elif args.command == 'load_settings':
        if args.i:
            user_dict = ast.literal_eval(args.i)
            # Create a blank config file on first use.
            if not os.path.exists(SETTINGS_FILE):
                user_configs = {key: None for key in
                                ['username', 'bulk_binary', 'twod_binary',
                                 'ncl_binary', 'sol_binary', 'custom_binary',
                                 'vdw_kernel', 'potentials', 'MAPI_KEY',
                                 'queue_system', 'queue_template']}
                with open(os.path.join(os.path.expanduser('~'),
                                       '.mpint_config.yaml'), 'w') \
                        as config_file:
                    yaml.dump(user_configs, config_file,
                              default_flow_style=False)
            config_data = yaml.load(open(SETTINGS_FILE))
            config_data.update(user_dict)
            load_config_vars(config_data)
    elif args.command == 'qcheck_project':
        states = []
        if args.i:
            f = open(args.i)
            project_spec = yaml.load(f)
            workflow_logs = [
                fi for fi in glob('{}*.log'.format(project_spec['NAME']))
                if 'InSilico' not in fi
            ]
            for l in workflow_logs:
                states = []
                print('Qcheck on {}'.format(l))
                tot, job_id, job_dir, job_name = decode_log_file(l)
                for n, j in enumerate(job_id):
                    state, oszi, nsw = decode_q(j, job_dir[n])
                    print(state, job_dir[n], j)
                    if state == 'R' and isinstance(oszi, Oszicar):
                        try:
                            print('Ionic Steps', len(oszi.ionic_steps), nsw,
                                  oszi.ionic_steps[-1])
                        except:
                            print('First Ionic Step', oszi.electronic_steps)
                    states.append(state)
                running_states = [s for s in states if s == 'R']
                print('{0} of {1} total jobs running'.format(
                    len(running_states), len(job_id)))
    elif args.command == 'cancel_project':
        states = []
        if args.i:
            f = open(args.i)
            project_spec = yaml.load(f)
            workflow_logs = [
                fi for fi in glob('{}*.log'.format(project_spec['NAME']))
                if 'InSilico' not in fi
            ]
            for l in workflow_logs:
                states = []
                print('Qcheck on {}'.format(l))
                tot, job_id, job_dir, job_name = decode_log_file(l)
                for n, j in enumerate(job_id):
                    # NOTE(review): this passes the whole job_id list to
                    # scancel on every iteration instead of the single
                    # id j -- confirm intended behavior.
                    os.system('scancel {}'.format(str(job_id)))
) print('') # system name to be inserted in job_vasp.sh system_name = 'N' # SETTING CHARGE STATE DICTIONARY charge_states_dict = {'Sr': [0, 1, 2], 'Ca': [0, 1, 2]} potcar_symbols = ['Na', 'Nb_pv', 'O'] ########################################################################### structure = Poscar.from_file('POSCAR_unit').structure default_inputs = DefaultInputs(structure) incar_settings = default_inputs.get_incar_default(xc='PBE') incar_settings['LVTOT'] = '.TRUE.' kpoints = Kpoints.gamma_automatic(kpts=(2, 2, 2)) supercell_size = 3 structure.make_supercell(supercell_size) structure_pure = structure.copy() for el in charge_states_dict: structure = structure_pure.copy() for s in structure.sites: if s.species_string == 'Na': defect_site = PeriodicSite(el, s.frac_coords, s.lattice) break substitution = Substitution(structure, defect_site) defect_site_coords = defect_site.frac_coords
# "LHFCALC" : ".TRUE.", # "HFSCREEN": 0.2, # "NKRED": 2, # "PRECFOCK": "Fast", # "AEXX": 0.24, } # creating Incar object from 'incar_dict' incar = Incar(incar_dict) # POSCAR poscar = Poscar(struct) poscar_dict = poscar.as_dict() # KPOINTS k = 7 kpoints = Kpoints.gamma_automatic(kpts=(k, k, k), shift=(0.0, 0.0, 0.0)) # get POTCAR with right order from POSCAR # check for prevoius element - if it's the same don't write it twice prevoius_el = None # initializing potcar_symbols potcar_symbols = [] #getting sites list sites = poscar_dict['structure']['sites'] # getting label for element on site for site_index in range(0, len(sites)): el = sites[site_index]['label'] # write only if it is different from the prevoious one if prevoius_el != el: potcar_symbols.append(potcar_choices[el]) prevoius_el = el
def make_vasp_defect_files(dictio, path_base, task_id, compo, hse=False,
                           encut_redo=False):
    """
    simple static method creating VASP files ready for defect computations

    Writes one directory per bulk supercell, dielectric run and charged
    defect supercell, each containing INCAR/KPOINTS/POSCAR/POTCAR plus
    transformations.json (and structures.json where applicable).

    Args:
        dictio: the defects data as a dictionnary
        path_base: where do we write the files
        task_id: some id of the bulk computed data
        compo: Composition of the bulk computed data
        hse: hse run or not
        encut_redo: if True, set ENCUT to 1.3x the largest POTCAR ENMAX
    """
    count = 1
    for site in dictio:
        #### bulk ####
        if site['short_name'] == 'bulk':
            bulk_unitcell = site['bulk_unitcell'].as_dict()
            uc_type = site['uc_type']
            sc_type = site['sc_type']
            for s in site['supercells']:
                defect_dist = round(closest_defect_distance(s['structure']), 2)
                bulk_info = '%s_%s_%s' % (uc_type, sc_type, str(defect_dist))
                dict_transf = {
                    'history': [{
                        'source': task_id,
                        'unitcell_type': site['uc_type']
                    }],
                    'defect_type': 'bulk',
                    'supercell': s['size']
                }
                structs = {'bulk_unitcell': bulk_unitcell}
                dict_params = MPStaticSet(s['structure']).all_input
                incar_init = dict_params['INCAR']
                incar = IncarSetup(incar_init, 'bulk', hse)
                if encut_redo:
                    # 30% headroom above the hardest pseudopotential.
                    enmax = round(
                        max([i.PSCTR['ENMAX']
                             for i in dict_params['POTCAR']]) * 1.3)
                    incar['ENCUT'] = int(enmax)
                # HSE runs get a gamma-centered mesh, others Monkhorst.
                if hse:
                    kpoint = Kpoints.gamma_automatic()
                else:
                    kpoint = Kpoints.monkhorst_automatic()
                path = path_base + "/" + str(
                    task_id
                ) + '_' + compo.reduced_formula + '/bulk/' + bulk_info
                os.makedirs(path)
                f = open(path + "/transformations.json", 'w')
                f.write(json.dumps(jsanitize(dict_transf)))
                g = open(path + "/structures.json", 'w')
                g.write(json.dumps(jsanitize(structs)))
                dict_params['POTCAR'].write_file(path + "/POTCAR")
                incar.write_file(path + "/INCAR")
                kpoint.write_file(path + "/KPOINTS")
                dict_params['POSCAR'].write_file(path + "/POSCAR")
            continue
        #### dielectric constants ####
        if site['short_name'] == 'dielectric':
            dict_transf = {
                'history': [{
                    'source': task_id,
                    'unit_cell': site['uc_type']
                }],
                'defect_type': 'dielectric'
            }
            dict_params = MPStaticSet(site['structure']).all_input
            incar = dict_params['INCAR']
            kpoints = Kpoints.automatic_gamma_density(site['structure'], 2000)
            # NOTE(review): m looks like a module-level Materials Project
            # rester -- confirm; ISPIN falls back to 1 on any failure.
            try:
                bs = m.get_bandstructure_by_material_id(task_id)
                if not bs.is_spin_polarized:
                    incar['ISPIN'] = 1
                else:
                    incar['ISPIN'] = 2
            except:
                incar['ISPIN'] = 1
            incar = IncarSetup(incar, 'dielectric', hse)
            if encut_redo:
                enmax = round(
                    max([i.PSCTR['ENMAX']
                         for i in dict_params['POTCAR']]) * 1.3)
                incar['ENCUT'] = int(enmax)
            path = path_base + "/" + str(
                task_id
            ) + '_' + compo.reduced_formula + "/" + 'dielectric/' + site[
                'uc_type']
            os.makedirs(path)
            f = open(path + "/transformations.json", 'w')
            f.write(json.dumps(jsanitize(dict_transf)))
            dict_params['POTCAR'].write_file(path + "/POTCAR")
            incar.write_file(path + "/INCAR")
            kpoints.write_file(path + "/KPOINTS")
            dict_params['POSCAR'].write_file(path + "/POSCAR")
            continue
        #### defects ####
        uc_type = site['uc_type']
        sc_type = site['sc_type']
        for charge in site['charges']:
            uc = site['bulk_unitcell'].copy()
            bulk_unitcell = uc.as_dict()
            for s in site['supercells']:
                defect_dist = round(closest_defect_distance(s['structure']), 2)
                defect_info = '%s_%s_%s' % (uc_type, sc_type,
                                            str(defect_dist))
                uc.make_supercell(s['size'], to_unit_cell=True)
                bulk_supercell = uc.as_dict()
                dict_transf = {
                    'history': [{
                        'source': task_id,
                        'unit_cell': site['uc_type']
                    }],
                    'compo': compo.as_dict(),
                    'defect_type': site['short_name'],
                    'defect_site': site['unique_sites'].as_dict(),
                    'charge': charge,
                    'supercell': s['size']
                }
                dict_params = MPRelaxSet(s['structure']).all_input
                # Fall back to the relaxed structure when no unmoved
                # reference exists.
                try:
                    defect_no_relaxation = s['struct_no_move'].as_dict()
                except:
                    defect_no_relaxation = s['structure'].as_dict()
                structs = {
                    'bulk_unitcell': bulk_unitcell,
                    'bulk_supercell': bulk_supercell,
                    'defect_no_relaxation': defect_no_relaxation
                }
                incar = dict_params['INCAR']
                incar = IncarSetup(incar, 'defect', hse)
                if encut_redo:
                    enmax = round(
                        max([i.PSCTR['ENMAX']
                             for i in dict_params['POTCAR']]) * 1.3)
                    incar['ENCUT'] = int(enmax)
                if hse:
                    kpoint = Kpoints.gamma_automatic()
                else:
                    kpoint = Kpoints.monkhorst_automatic()
                path = path_base + "/" + str(task_id) + '_' + compo.reduced_formula + \
                    '/' + str(site['short_name']) + "/" + "charge" + str(charge) + '/' + defect_info
                os.makedirs(path)
                f = open(path + "/transformations.json", 'w')
                f.write(json.dumps(jsanitize(dict_transf)))
                g = open(path + "/structures.json", 'w')
                g.write(json.dumps(jsanitize(structs)))
                # Total valence electron count from POTCAR, used to set
                # NELECT for charged cells.
                comp = s['structure'].composition
                sum_elec = 0
                elts = set()
                for p in dict_params['POTCAR']:
                    if p.element not in elts:
                        sum_elec += comp.as_dict()[p.element] * p.nelectrons
                        elts.add(p.element)
                if charge != 0:
                    incar['NELECT'] = sum_elec - charge
                dict_params['POTCAR'].write_file(path + "/POTCAR")
                incar.write_file(path + "/INCAR")
                kpoint.write_file(path + "/KPOINTS")
                dict_params['POSCAR'].write_file(path + "/POSCAR")
                count = count + 1
    # NOTE(review): only the last-opened transformations/structures
    # handles are closed here; earlier iterations leak until GC --
    # confirm whether per-iteration closes were intended.
    f.close()
    g.close()
# Header for a launcher script (not written out within this span).
lancer = "#!/bin/bash\n\n"

# Scan the Hubbard U parameter: one VASP job directory per U value,
# sharing POSCAR/POTCAR and a patched INCAR.
for U in [0, 2, 4, 6, 8, 10]:
    print("U = %f" % U)
    # make job directory
    dirname = "U_%d" % U
    dirname = os.path.join(os.getcwd(), dirname)
    os.mkdir(dirname)
    # copy files
    shutil.copy("POSCAR", os.path.join(dirname, "POSCAR"))
    shutil.copy("POTCAR", os.path.join(dirname, "POTCAR"))
    # Gamma-centered mesh; kx, ky, kz are assumed to be defined earlier
    # in the script -- TODO confirm.
    kpoints = Kpoints.gamma_automatic((kx, ky, kz))
    kpoints.write_file(os.path.join(dirname, "KPOINTS"))
    jobname = basename + ("_U%d" % U) + ".job"
    shutil.copy(job, os.path.join(dirname, jobname))
    # complete and copy INCAR file: rewrite only the LDAUU line for the
    # current U (second species pinned at 0.0).
    with open("INCAR", "r") as fincar:
        incar_lines = ""
        for line in fincar:
            if "LDAUU" in line:
                line = " LDAUU = %3.1f 0.0\n" % U
            incar_lines += line
    with open(os.path.join(dirname, "INCAR"), "w") as fincar:
        fincar.write(incar_lines)