import sys

from pymatgen.analysis.surface_analysis import WorkFunctionAnalyzer
from pymatgen.io.vasp.inputs import Poscar
from pymatgen.io.vasp.outputs import Locpot, Outcar


def from_files(poscar_filename, locpot_filename, outcar_filename, shift=0):
    p = Poscar.from_file(poscar_filename)
    l = Locpot.from_file(locpot_filename)
    o = Outcar(outcar_filename)
    # The averaging axis is taken from the first command-line argument.
    return WorkFunctionAnalyzer(p.structure,
                                l.get_average_along_axis(int(sys.argv[1])),
                                o.efermi, shift=shift)
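# A minimal usage sketch for from_files above. The file names are the standard VASP
# outputs; since the averaging axis is read from sys.argv[1], the containing script
# must be invoked with the axis index as its first argument (e.g. 2 for the c-axis).
wfa = from_files("POSCAR", "LOCPOT", "OUTCAR", shift=0)
print(wfa.work_function, wfa.vacuum_locpot)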
def get_all_poscars(self, dirslist, root_path='./'):
    poscar = []
    if self.checkstart(dirslist) and self.checkend(dirslist):
        for i in range(len(dirslist)):
            # intermediate images use CONTCAR, the two endpoints use POSCAR
            if i != 0 and i != len(dirslist) - 1:
                path = os.path.join(root_path, dirslist[i], "CONTCAR")
            else:
                path = os.path.join(root_path, dirslist[i], "POSCAR")
            poscar.append(Poscar.from_file(path))
    else:
        raise AssertionError("Check your start and end directories; "
                             "POSCAR or OSZICAR may be missing.")
    return poscar
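# A minimal usage sketch for get_all_poscars above (hypothetical instance `neb` of
# the owning class; the directory names are assumed NEB-style image folders whose
# endpoints hold POSCARs and whose intermediate images hold CONTCARs):
image_dirs = ["00", "01", "02", "03", "04"]
poscars = neb.get_all_poscars(image_dirs, root_path="./neb_run")
structures = [p.structure for p in poscars]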
def isotope(self, before="POMASS = 1.000", change="POMASS = 3.000"):
    # Replace the POMASS value in the POTCAR text (the defaults swap mass 1.000
    # for 3.000, i.e. an H -> T isotope substitution) and write it as POTCAR_1.
    import os
    from pymatgen.io.vasp.inputs import Poscar, Potcar, PotcarSingle

    os.chdir(self.dire)
    print(os.getcwd())
    poscar = Poscar.from_file("POSCAR")
    # structure = poscar.structure
    print(poscar)
    potcar = Potcar.from_file("POTCAR")
    potc = str(potcar)
    cc = potc.replace(before, change)
    vv = PotcarSingle(cc)
    vv.write_file("POTCAR_1")
    print(vv)
def create_entry_from_vasp(self, eps_slab=None, d_slab=None):
    """
    Create a database entry from data obtained directly from VASP output files,
    with the format { functional: { mu: xxx, vac_x: { VBM: xxx } } }.
    """
    if self.dbentry:
        ## load existing json file
        mater = self.load_from_json()
    else:
        ## or initialize empty dict
        mater = {}
        self.dbentry = "%s.json" % self.system

    for i, func in enumerate(self.funcs):
        if func not in mater:
            mater[func] = {}

        dir_func = os.path.join(self.dir_dft, func.split('+')[0])
        if "mag" in osutils.listdironly(dir_func):
            dir_func = os.path.join(dir_func, "mag")

        if self.monolayer:
            ## for a monolayer system, enter every vacuum subdirectory
            ## and extract the total energy, band gap and VBM
            for vac in osutils.listdironly(dir_func):
                if vac[0:3] == "vac":
                    dir_vac = os.path.join(dir_func, vac)
                    if func.split('+')[-1] == "soc":
                        dir_vac = os.path.join(dir_vac, "soc")
                    vrfile = os.path.join(dir_vac, 'vasprun.xml')
                    if not os.path.exists(vrfile):
                        self.log.info("vasprun.xml file does not exist")
                    else:
                        if vac not in mater[func]:
                            mater[func][vac] = {}
                        vr = Vasprun(vrfile)
                        gap, cbm, vbm, direct = vr.eigenvalue_band_properties
                        # mater[func][vac].update({"Etot": vr.final_energy})
                        # mater[func][vac].update({"Egap": gap})
                        mater[func][vac].update({"VBM": vbm})
                        if vac == "vac_20":
                            ## get mu = energy per formula unit;
                            ## a vacuum spacing of 20 A is usually well-converged,
                            ## so we use that energy
                            structure = Poscar.from_file(
                                os.path.join(dir_vac, 'POSCAR')).structure
                            formula_units = (
                                structure.composition.num_atoms
                                / structure.composition.reduced_composition.num_atoms)
                            mater[func].update(
                                {"mu": vr.final_energy / formula_units})
                            mater[func].update({"Egap": gap})

            if "vac_20" not in osutils.listdironly(dir_func):
                self.log.info("can't find the vac_20 subdirectory, "
                              "so there is no converged energy to use")

            if eps_slab and d_slab:
                mater[func].update({"eps_slab": eps_slab[i]})
                mater[func].update({"d_slab": d_slab[i]})

        else:
            ## for a bulk system this is more straightforward
            structure = Poscar.from_file(os.path.join(dir_func, 'POSCAR')).structure
            formula_units = (
                structure.composition.num_atoms
                / structure.composition.reduced_composition.num_atoms)
            vrfile = os.path.join(dir_func, 'vasprun.xml')
            if not os.path.exists(vrfile):
                self.log.info("vasprun.xml file does not exist")
            else:
                vr = Vasprun(vrfile)
                gap, cbm, vbm, direct = vr.eigenvalue_band_properties
                mater[func].update({"mu": vr.final_energy / formula_units})
                # mater[func].update({"Egap": gap})
                # mater[func].update({"VBM": vbm})

    ## write json file
    self.write_to_json(mater)
    return
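# A hedged example of the database entry written by create_entry_from_vasp above
# for a monolayer; the functional name, vacuum keys, and all numbers are
# illustrative only:
example_entry = {
    "PBE": {
        "mu": -27.145,              # energy per formula unit, taken from the vac_20 run
        "Egap": 1.83,               # band gap from the vac_20 vasprun.xml
        "vac_15": {"VBM": -5.12},
        "vac_20": {"VBM": -5.10},
        "eps_slab": 4.1,            # only written if eps_slab/d_slab are passed in
        "d_slab": 6.5,
    }
}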
def kumagai_loader(self, bulk_outcar=None):
    """Load metadata required for performing the Kumagai correction.

    Requires "bulk_path" and "defect_path" to be loaded in the DefectEntry
    parameters dict.

    Args:
        bulk_outcar (Outcar): Add bulk Outcar object for expedited parsing.
            If None, will load from the file path variable bulk_path.
    Return:
        bulk_outcar object for reuse by another defect entry
        (for expedited parsing)
    """
    if not self.defect_entry.charge:
        # don't need to load OUTCARs if the charge is zero
        return None

    if not bulk_outcar:
        bulk_outcar_path = os.path.join(
            self.defect_entry.parameters["bulk_path"], "OUTCAR")
        bulk_outcar = Outcar(bulk_outcar_path)

    def_outcar_path = os.path.join(
        self.defect_entry.parameters["defect_path"], "OUTCAR")
    def_outcar = Outcar(def_outcar_path)

    bulk_atomic_site_averages = bulk_outcar.electrostatic_potential
    defect_atomic_site_averages = def_outcar.electrostatic_potential

    bulk_sc_structure = Poscar.from_file(
        os.path.join(self.defect_entry.parameters["bulk_path"], "POSCAR")).structure

    if os.path.exists(
            os.path.join(self.defect_entry.parameters["defect_path"], "POSCAR")):
        initial_defect_structure = Poscar.from_file(
            os.path.join(self.defect_entry.parameters["defect_path"],
                         "POSCAR")).structure
    elif self.defect_vr:
        initial_defect_structure = self.defect_vr.initial_structure
    else:
        initial_defect_structure = Vasprun(
            os.path.join(self.defect_entry.parameters["defect_path"],
                         "vasprun.xml")).initial_structure

    bulksites = [site.frac_coords for site in bulk_sc_structure]
    initsites = [site.frac_coords for site in initial_defect_structure]
    distmatrix = initial_defect_structure.lattice.get_all_distances(bulksites, initsites)
    # first index of this list is the bulk index
    min_dist_with_index = [[min(distmatrix[bulk_index]),
                            int(bulk_index),
                            int(distmatrix[bulk_index].argmin())]
                           for bulk_index in range(len(distmatrix))]
    # list of [min dist, bulk ind, defect ind]

    site_matching_indices = []
    poss_defect = []
    if isinstance(self.defect_entry.defect, (Vacancy, Interstitial)):
        for mindist, bulk_index, defect_index in min_dist_with_index:
            if mindist < 0.1:
                site_matching_indices.append([bulk_index, defect_index])
            elif isinstance(self.defect_entry.defect, Vacancy):
                poss_defect.append([bulk_index, bulksites[bulk_index][:]])

        if isinstance(self.defect_entry.defect, Interstitial):
            poss_defect = [[ind, fc[:]] for ind, fc in enumerate(initsites)
                           if ind not in np.array(site_matching_indices)[:, 1]]

    elif isinstance(self.defect_entry.defect, Substitution):
        for mindist, bulk_index, defect_index in min_dist_with_index:
            species_match = bulk_sc_structure[bulk_index].specie == \
                initial_defect_structure[defect_index].specie
            if mindist < 0.1 and species_match:
                site_matching_indices.append([bulk_index, defect_index])
            elif not species_match:
                poss_defect.append([defect_index, initsites[defect_index][:]])

    if len(poss_defect) == 1:
        defect_index_sc_coords = poss_defect[0][0]
        defect_frac_sc_coords = poss_defect[0][1]
    else:
        raise ValueError("Found {} possible defect sites when matching bulk and "
                         "defect structure".format(len(poss_defect)))

    if len(set(np.array(site_matching_indices)[:, 0])) != \
            len(set(np.array(site_matching_indices)[:, 1])):
        raise ValueError("Error occurred in site_matching routine. Double counting "
                         "of site matching occurred: {}\nAdvising against Kumagai "
                         "parsing.".format(site_matching_indices))

    # use the Wigner-Seitz radius as the sampling radius
    wz = initial_defect_structure.lattice.get_wigner_seitz_cell()
    dist = []
    for facet in wz:
        midpt = np.mean(np.array(facet), axis=0)
        dist.append(np.linalg.norm(midpt))
    sampling_radius = min(dist)

    self.defect_entry.parameters.update({
        "bulk_atomic_site_averages": bulk_atomic_site_averages,
        "defect_atomic_site_averages": defect_atomic_site_averages,
        "initial_defect_structure": initial_defect_structure,
        "site_matching_indices": site_matching_indices,
        "sampling_radius": sampling_radius,
        "defect_frac_sc_coords": defect_frac_sc_coords,
        "defect_index_sc_coords": defect_index_sc_coords})

    return bulk_outcar
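# A standalone sketch of the sampling-radius step in kumagai_loader above: the
# minimum distance to a Wigner-Seitz facet midpoint gives the radius used for the
# atomic-site potential sampling (a 10 A cubic lattice is used purely for illustration):
import numpy as np
from pymatgen.core import Lattice

lattice = Lattice.cubic(10.0)
facets = lattice.get_wigner_seitz_cell()
sampling_radius = min(np.linalg.norm(np.mean(np.array(f), axis=0)) for f in facets)
print(sampling_radius)  # 5.0 for the 10 A cubic cell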
def from_paths(path_to_defect, path_to_bulk, dielectric, defect_charge, mpid=None,
               compatibility=DefectCompatibility(), initial_defect_structure=None):
    """
    Identify defect object based on file paths. Minimal parsing is performed to
    instantiate the SingleDefectParser class.

    :param path_to_defect (str): path to defect folder of interest
    :param path_to_bulk (str): path to bulk folder of interest
    :param dielectric (float or 3x3 matrix): ionic + static contributions to
        the dielectric constant
    :param defect_charge (int):
    :param mpid (str):
    :param compatibility (DefectCompatibility): Compatibility class instance for
        performing compatibility analysis on the defect entry.

    Return:
        Instance of the SingleDefectParser class.
    """
    parameters = {"bulk_path": path_to_bulk, "defect_path": path_to_defect,
                  "dielectric": dielectric, "mpid": mpid}

    # add bulk simple properties
    bulk_vr = Vasprun(os.path.join(path_to_bulk, "vasprun.xml"))
    bulk_energy = bulk_vr.final_energy
    bulk_sc_structure = bulk_vr.initial_structure.copy()

    # add defect simple properties
    defect_vr = Vasprun(os.path.join(path_to_defect, "vasprun.xml"))
    defect_energy = defect_vr.final_energy
    # An initial defect structure can be specified (to help PyCDT find the defect
    # site if multiple relaxations were required); otherwise use the one from the
    # defect relaxation output:
    if initial_defect_structure:
        initial_defect_structure = Poscar.from_file(
            initial_defect_structure).structure.copy()
    else:
        initial_defect_structure = defect_vr.initial_structure.copy()

    # identify defect site, structural information, and create defect object
    num_ids = len(initial_defect_structure)
    num_bulk = len(bulk_sc_structure)
    if num_ids == num_bulk - 1:
        defect_type = "Vacancy"
    elif num_ids == num_bulk + 1:
        defect_type = "Interstitial"
    elif num_ids == num_bulk:
        defect_type = "Substitution"
    else:
        raise ValueError(
            "Could not identify defect type just from the number of sites in the "
            "structure: {} in bulk vs. {} in defect?".format(num_ids, num_bulk))

    defect_index_sc_coords = None
    transformation_path = os.path.join(path_to_defect, "transformation.json")
    if os.path.exists(transformation_path):
        tf = loadfn(transformation_path)
        site = tf["defect_supercell_site"]
        if defect_type == "Vacancy":
            poss_deflist = sorted(
                bulk_sc_structure.get_sites_in_sphere(site.coords, 0.1,
                                                      include_index=True),
                key=lambda x: x[1])
        else:
            poss_deflist = sorted(
                initial_defect_structure.get_sites_in_sphere(site.coords, 0.1,
                                                             include_index=True),
                key=lambda x: x[1])
        if not len(poss_deflist):
            raise ValueError("{} specified defect site {}, but could not find it "
                             "in bulk_supercell. Abandoning parsing".format(
                                 transformation_path, site))
        else:
            defect_index_sc_coords = poss_deflist[0][2]
    else:
        print("No transformation file exists at {}.\nCalculating defect index "
              "manually (proceed with caution)".format(transformation_path))

    # If no transformation file exists, defect_index_sc_coords was not identified
    # in the previous routine; proceed by identifying the defect site through a
    # comparison of bulk sites and initial defect structure sites.
    # WARNING: this can cause issues if initial_defect_structure is slightly
    # different from bulk_sc_structure (as a result of multiple relaxation steps,
    # for example).
    if defect_index_sc_coords is None:
        bulksites = [site.frac_coords for site in bulk_sc_structure]
        initsites = [site.frac_coords for site in initial_defect_structure]
        distmatrix = initial_defect_structure.lattice.get_all_distances(bulksites,
                                                                        initsites)
        min_dist_with_index = [[min(distmatrix[bulk_index]),
                                int(bulk_index),
                                int(distmatrix[bulk_index].argmin())]
                               for bulk_index in range(len(distmatrix))]
        # list of [min dist, bulk ind, defect ind]

        site_matching_indices = []
        poss_defect = []
        if defect_type in ["Vacancy", "Interstitial"]:
            for mindist, bulk_index, defect_index in min_dist_with_index:
                if mindist < 0.1:
                    site_matching_indices.append([bulk_index, defect_index])
                elif defect_type == "Vacancy":
                    poss_defect.append([bulk_index, bulksites[bulk_index][:]])

            if defect_type == "Interstitial":
                poss_defect = [[ind, fc[:]] for ind, fc in enumerate(initsites)
                               if ind not in np.array(site_matching_indices)[:, 1]]

        elif defect_type == "Substitution":
            for mindist, bulk_index, defect_index in min_dist_with_index:
                species_match = bulk_sc_structure[bulk_index].specie == \
                    initial_defect_structure[defect_index].specie
                if mindist < 0.1 and species_match:
                    site_matching_indices.append([bulk_index, defect_index])
                elif not species_match:
                    poss_defect.append([defect_index, initsites[defect_index][:]])

        if len(poss_defect) == 1:
            defect_index_sc_coords = poss_defect[0][0]
        else:
            raise ValueError("Found {} possible defect sites when matching bulk "
                             "and defect structure".format(len(poss_defect)))

        if len(set(np.array(site_matching_indices)[:, 0])) != \
                len(set(np.array(site_matching_indices)[:, 1])):
            raise ValueError("Error occurred in site_matching routine. Double "
                             "counting of site matching occurred: {}\nAbandoning "
                             "structure parsing.".format(site_matching_indices))

    if defect_type == "Vacancy":
        defect_site = bulk_sc_structure[defect_index_sc_coords]
    else:
        defect_site = initial_defect_structure[defect_index_sc_coords]

    for_monty_defect = {"@module": "pymatgen.analysis.defects.core",
                        "@class": defect_type,
                        "charge": defect_charge,
                        "structure": bulk_sc_structure,
                        "defect_site": defect_site}
    defect = MontyDecoder().process_decoded(for_monty_defect)

    test_defect_structure = defect.generate_defect_structure()
    if not StructureMatcher(stol=0.5, primitive_cell=False, scale=False,
                            attempt_supercell=False,
                            allow_subset=False).fit(test_defect_structure,
                                                    defect_vr.initial_structure):
        # NOTE: this does not ensure that cartesian coordinates or indexing are identical
        # Note: stol is set to 0.5 to fix matching for defects that relax far from
        # their initial site
        raise ValueError("Error in defect object matching!")

    defect_entry = DefectEntry(defect, defect_energy - bulk_energy,
                               corrections={}, parameters=parameters)

    return SingleDefectParser(defect_entry, compatibility=compatibility,
                              defect_vr=defect_vr, bulk_vr=bulk_vr)
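# A hedged end-to-end sketch tying from_paths and kumagai_loader together, assuming
# from_paths is exposed as a static method of SingleDefectParser (as its return
# value suggests); the paths, dielectric constant, and charge are placeholders:
sdp = SingleDefectParser.from_paths("path/to/defect", "path/to/bulk",
                                    dielectric=10.0, defect_charge=-1)
bulk_outcar = sdp.kumagai_loader()
print(sdp.defect_entry.parameters["sampling_radius"])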
(options, args) = parser.parse_args()

v = Vasprun('vasprun.xml')
cdos = v.complete_dos
element_dos = cdos.get_element_dos()
plotter = DosPlotter()
efermi = v.efermi

if options.verbose:
    from pymatgen.core import Element
    from pymatgen.io.vasp.outputs import Locpot, Poscar
    from pymatgen.analysis.surface_analysis import WorkFunctionAnalyzer

    l = Locpot.from_file('LOCPOT')
    s = Poscar.from_file('CONTCAR')
    wf = WorkFunctionAnalyzer(s.structure, l.get_average_along_axis(1), efermi, shift=0)
    loc_vac = wf.vacuum_locpot
    # reference each element-projected DOS to the vacuum level
    for i in element_dos:
        element_dos[i].efermi = loc_vac

plotter.add_dos_dict(element_dos)
plt = plotter.get_plot(xlim=[-9, 1])
plt.plot([efermi - loc_vac, efermi - loc_vac], plt.ylim(), 'b--',
def poscars(self):
    return [Poscar(s) for s in self.structures]
def poscar(self):
    return Poscar(self.structures[0])
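# A minimal usage sketch for the two accessors above (hypothetical `traj` object
# exposing .structures; they are called as plain methods here, exactly as defined,
# though the full class may expose them as properties):
traj.poscar().write_file("POSCAR")
for i, p in enumerate(traj.poscars()):
    p.write_file("POSCAR_%03d" % i)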
##### Part 2: Iteration over each file. #######################################
for filename in file_list:
    #### Part 2-1: determine the type of file: xml or POSCAR ####
    if filename[-4:] == '.xml':
        # if the file is a vasprun.xml
        dos_vrun = Vasprun(filename)
        total_dos = dos_vrun.complete_dos
        # get the structure from the vasprun
        # struct = dos_vrun.structures[-1]
        struct = dos_vrun.final_structure
    else:
        # if the file is in POSCAR format
        P = Poscar.from_file(filename)
        struct = P.structure
    ##############################################################
    #### Part 2-2: Make dictionary: from label to site index ####
    # For easy use, label sites in the conventional manner:
    # element + site position in the POSCAR
    # (i.e. BaTiO3 --> Ba1, Ti2, O1, O2, O3)
    n_atom_count_dict = dict()
    label2site_index = dict()
    for i in range(struct.num_sites):
        # Update the per-element counter for each site
        if struct[i].specie in list(n_atom_count_dict.keys()):
            n_atom_count_dict.update(
                {struct[i].specie: n_atom_count_dict[struct[i].specie] + 1})
        else:
            n_atom_count_dict.update({struct[i].specie: 1})
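# A standalone, hedged sketch of one plausible labeling convention for the loop
# above (the original snippet is truncated before the label assignment, so the
# per-element counter shown here is an assumption; the structure is illustrative):
from pymatgen.core import Lattice, Structure

demo = Structure(Lattice.cubic(4.0), ["Ba", "Ti", "O", "O", "O"],
                 [[0, 0, 0], [0.5, 0.5, 0.5], [0.5, 0.5, 0], [0.5, 0, 0.5], [0, 0.5, 0.5]])
counts, demo_label2site_index = {}, {}
for idx, site in enumerate(demo):
    counts[site.specie] = counts.get(site.specie, 0) + 1
    demo_label2site_index["%s%d" % (site.specie, counts[site.specie])] = idx
print(demo_label2site_index)  # {'Ba1': 0, 'Ti1': 1, 'O1': 2, 'O2': 3, 'O3': 4}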
import numpy as np
from ocelot.task.emtensor import *
from pymatgen.io.vasp.outputs import Eigenval, Spin, Outcar, Poscar
import glob
from pprint import pprint

kfc = (0, 0, 0)
iband = 15
ss = 0.01
ss = rb2ra(0.01)  # step size: rb2ra converts reciprocal Bohr to reciprocal Angstrom
# outcar = Outcar('OUTCAR')
poscar = Poscar.from_file('POSCAR', check_for_POTCAR=False)
real_latt = poscar.structure.lattice.matrix
real_latt = np.array(real_latt)
reci_matrix = 2 * np.pi * np.linalg.inv(real_latt).T  # 1/A

eigens = Eigenval('./EIGENVAL').eigenvalues[Spin.up]
flat_eigens = [x[iband][1] for x in eigens]
flat_eigens = np.array(flat_eigens)

emt = EmTensor(kfc, iband, ss, reci_matrix, flat_eigens, 3)
emt.write_kmesh('KPOINTS_ocelot')
ems, es, eigenvs_frac, eigenvs_cart = emt.cal_emtensor()
pprint(ems)
"""
[[-2.90687151  0.          0.        ]
 [ 0.         -2.90687151  0.        ]
 [ 0.          0.         -2.90687151]]
array([-0.34401245, -0.34401245, -0.34401245])
"""
def Phonopyinput(self, mpid, intt):
    import os
    import sys
    import shutil
    import subprocess
    from shutil import copyfile
    from pymatgen.core import Structure
    from pymatgen.ext.matproj import MPRester
    from pymatgen.io.vasp.inputs import Kpoints, Poscar, Potcar
    from pymatgen.io.vasp.sets import MPNMRSet, MPRelaxSet
    from pymatgen.io.phonopy import get_displaced_structures
    from pymatgen.symmetry.bandstructure import HighSymmKpath
    from pymatgen.symmetry.kpath import KPathSeek, KPathBase

    os.chdir(self.dire)
    print(os.getcwd())
    ptt = os.path.join(self.dire, mpid + "isotope")
    os.chdir(ptt)
    poscar = Poscar.from_file("POSCAR")
    structure = poscar.structure
    for filenames in os.walk(ptt):
        print(filenames)

    dire3 = './INCAR'  # set the destination file name
    shutil.copy(r"D:\Desktop\VASP practical\Input\INCAR_std\INCAR_phono", dire3)
    print("INCAR copied")

    dire2 = './vaspstd_sub'  # set the destination file name
    shutil.copy(r"D:\Desktop\VASP practical\Input\INCAR_std\vaspstd_sub", dire2)
    print("vaspstd_sub copied")

    # os.chdir(self.dire)
    # os.chdir(ptt)
    # print(os.getcwd())
    # print("Right")

    copyfile("POSCAR", "POSCAR-unitcell")
    copyfile("SPOSCAR", "POSCAR")
    print("POSCAR copied")

    # cd = KPathSeek(structure)
    # # jh = cd.get_kpoints
    # kk = Kpoints.automatic_linemode(divisions=10, ibz=cd)
    # # kk = Kpoints.automatic_density(structure, kppa=intt)
    # kk.write_file("KPOINTS")
    print("KPOINTS written")

    os.chdir(self.dire)
    sys.exit()
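# A hedged sketch of the displacement-generation step this method prepares POSCAR
# files for (get_displaced_structures is imported above, lives in pymatgen.io.phonopy,
# and needs phonopy installed; the 2x2x2 supercell matrix here is only an assumption):
from pymatgen.io.phonopy import get_displaced_structures
from pymatgen.io.vasp.inputs import Poscar

unitcell = Poscar.from_file("POSCAR-unitcell").structure
displaced = get_displaced_structures(unitcell, atom_disp=0.01,
                                     supercell_matrix=[[2, 0, 0], [0, 2, 0], [0, 0, 2]])
# index 0 is typically the undisplaced supercell; the rest carry one displacement each
for i, s in enumerate(displaced):
    Poscar(s).write_file("POSCAR-%03d" % i)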
def run_task(self, fw_spec):
    """
    Required Parameters:
        folder (str path): Location where vasp inputs are to be written
        custodian_params (dict **kwargs): Contains the job and the scratch
            directory for a custodian run
        vaspdbinsert_parameters (dict **kwargs): Contains the information needed
            to access a DB, e.g. host, port, password, etc.

    Optional Parameters:
        min_vac_size (float): Size of the vacuum layer of the slab in Angstroms
        min_slab_size (float): Size of the slab layer of the slab in Angstroms
        angle_tolerance (int): See SpacegroupAnalyzer in analyzer.py
        user_incar_settings (dict): See launch_workflow() method in
            CreateSurfaceWorkflow class
        k_product (dict): See launch_workflow() method in CreateSurfaceWorkflow class
        potcar_functional (dict): See launch_workflow() method in
            CreateSurfaceWorkflow class
        symprec (float): See SpacegroupAnalyzer in analyzer.py
        terminations (bool): Determines whether or not to consider different
            terminations in a slab. If True, each slab with a specific shift value
            will have its own Firework and each of the slab calculations will run
            in parallel. Defaults to False, which sets the shift value to 0.
    """
    dec = MontyDecoder()
    folder = dec.process_decoded(self.get("folder"))
    cwd = dec.process_decoded(self.get("cwd"))
    symprec = dec.process_decoded(self.get("symprec", 0.001))
    angle_tolerance = dec.process_decoded(self.get("angle_tolerance", 5))
    terminations = dec.process_decoded(self.get("terminations", False))
    custodian_params = dec.process_decoded(self.get("custodian_params"))
    vaspdbinsert_parameters = \
        dec.process_decoded(self.get("vaspdbinsert_parameters"))
    user_incar_settings = \
        dec.process_decoded(self.get("user_incar_settings",
                                     MPSlabVaspInputSet().incar_settings))
    k_product = dec.process_decoded(self.get("k_product", 50))
    potcar_functional = dec.process_decoded(self.get("potcar_functional", 'PBE'))
    min_slab_size = dec.process_decoded(self.get("min_slab_size", 10))
    min_vacuum_size = dec.process_decoded(self.get("min_vacuum_size", 10))
    miller_index = dec.process_decoded(self.get("miller_index"))

    print('about to make mplb')
    mplb = MPSlabVaspInputSet(user_incar_settings=user_incar_settings,
                              k_product=k_product,
                              potcar_functional=potcar_functional,
                              ediff_per_atom=False)

    # Create slabs from the relaxed oriented unit cell. Since the unit cell is
    # already oriented with the miller index, entering (0,0,1) into SlabGenerator
    # is the same as obtaining a slab in the orientation of the original
    # miller index.
    print('about to copy contcar')
    contcar = Poscar.from_file("%s/CONTCAR.relax2.gz" % (cwd + folder))
    relax_orient_uc = contcar.structure
    print('made relaxed oriented structure')
    print(relax_orient_uc)

    print('making slab')
    slabs = SlabGenerator(relax_orient_uc, (0, 0, 1),
                          min_slab_size=min_slab_size,
                          min_vacuum_size=min_vacuum_size,
                          max_normal_search=max(miller_index))

    # Whether or not to create a list of Fireworks
    # based on different slab terminations
    print('deciding terminations')
    slab_list = slabs.get_slabs() if terminations else [slabs.get_slab()]

    qe = QueryEngine(**vaspdbinsert_parameters)
    optional_data = ["state"]
    print('query bulk entry for job completion')
    bulk_entry = qe.get_entries({'chemsys': relax_orient_uc.composition.reduced_formula,
                                 'structure_type': 'oriented_unit_cell',
                                 'miller_index': miller_index},
                                optional_data=optional_data)
    print('chemical formula', relax_orient_uc.composition.reduced_formula)
    print('formula data type is', type(relax_orient_uc.composition.reduced_formula))
    print('checking job completion')
    print(bulk_entry)

    for entry in bulk_entry:
        print('for loop')
        print(entry.data['state'])
        if entry.data['state'] != 'successful':
            print("%s bulk calculations were incomplete, cancelling FW"
                  % (relax_orient_uc.composition.reduced_formula))
            return FWAction()
        else:
            print(entry.data['state'])

    FWs = []
    for slab in slab_list:
        print(slab)
        new_folder = folder.replace('bulk', 'slab') + '_shift%s' % (slab.shift)

        # Will continue an incomplete job from a previous CONTCAR file if it exists
        print('cwd is %s' % (os.getcwd()))
        print('the folder is %s' % (new_folder))
        print(os.path.join(os.getcwd(), new_folder))
        print(cwd + '/' + new_folder)

        path = cwd + '/' + new_folder
        # path = os.path.join(os.getcwd(), folder)
        newfolder = os.path.join(path, 'prev_run')

        # print('check if conditions for continuing calculations have been satisfied')
        # print('check for the following path: %s' % (path))
        # print(os.path.exists(path))
        # print(os.path.exists(os.path.join(path, 'CONTCAR.gz')))
        # print(os.stat(os.path.join(path, 'CONTCAR.gz')).st_size != 0)

        def continue_vasp(contcar):
            print(folder, 'already exists, will now continue calculation')
            print('making prev_run folder')
            os.system('mkdir %s' % (newfolder))
            print('moving outputs to prev_run')
            os.system('mv %s/* %s/prev_run' % (path, path))
            print('moving outputs as inputs for next calculation')
            os.system('cp %s/%s %s/INCAR %s/POTCAR %s/KPOINTS %s'
                      % (newfolder, contcar, newfolder, newfolder, newfolder, path))
            print('unzipping new inputs')
            os.system('gunzip %s/*' % (path))
            print('copying contcar as new poscar')
            if contcar == 'CONTCAR.relax1.gz':
                os.system('mv %s/CONTCAR.relax1 %s/POSCAR' % (path, path))
            else:
                os.system('mv %s/CONTCAR %s/POSCAR' % (path, path))

        if os.path.exists(path) and \
                os.path.exists(os.path.join(path, 'CONTCAR')) and \
                os.stat(os.path.join(path, 'CONTCAR')).st_size != 0:
            continue_vasp('CONTCAR')
        elif os.path.exists(path) and \
                os.path.exists(os.path.join(path, 'CONTCAR.gz')) and \
                os.stat(os.path.join(path, 'CONTCAR.gz')).st_size != 0:
            continue_vasp('CONTCAR.gz')
        elif os.path.exists(path) and \
                os.path.exists(os.path.join(path, 'CONTCAR.relax1.gz')) and \
                os.stat(os.path.join(path, 'CONTCAR.relax1.gz')).st_size != 0:
            continue_vasp('CONTCAR.relax1.gz')
        else:
            mplb.write_input(slab, cwd + new_folder)

        # Writes a new INCAR file based on changes made by custodian on the bulk's INCAR.
        # The only change in parameters between slab and bulk should be MAGMOM and ISIF.
        if os.path.exists("%s/INCAR.relax2.gz" % (cwd + folder)):
            incar = Incar.from_file(cwd + folder + '/INCAR.relax2.gz')
        else:
            incar = Incar.from_file(cwd + folder + '/INCAR.relax2')
        if os.path.exists("%s/OUTCAR.relax2.gz" % (cwd + folder)):
            out = Outcar(cwd + folder + '/OUTCAR.relax2.gz')
        else:
            out = Outcar(cwd + folder + '/OUTCAR.relax2')

        out_mag = out.magnetization
        tot_mag = [mag['tot'] for mag in out_mag]
        magmom = np.mean(tot_mag)
        mag = [magmom for i in slab]
        incar['MAGMOM'] = mag
        incar['ISIF'] = 2
        incar['AMIN'] = 0.01
        incar['AMIX'] = 0.2
        incar['BMIX'] = 0.001
        incar['NELMIN'] = 8
        incar['ISTART'] = 0
        incar.write_file(cwd + new_folder + '/INCAR')

        fw = Firework([RunCustodianTask(dir=new_folder, cwd=cwd,
                                        **custodian_params),
                       VaspSlabDBInsertTask(struct_type="slab_cell",
                                            loc=new_folder, cwd=cwd,
                                            shift=slab.shift,
                                            surface_area=slab.surface_area,
                                            vsize=slabs.min_vac_size,
                                            ssize=slabs.min_slab_size,
                                            miller_index=miller_index,
                                            **vaspdbinsert_parameters)],
                      name=new_folder)
        FWs.append(fw)

    return FWAction(additions=FWs)
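# A standalone sketch of the slab-generation step used in run_task above; the bulk
# file name and the slab/vacuum sizes are illustrative:
from pymatgen.core import Structure
from pymatgen.core.surface import SlabGenerator

bulk = Structure.from_file("CONTCAR")
gen = SlabGenerator(bulk, (0, 0, 1), min_slab_size=10, min_vacuum_size=10)
all_terminations = gen.get_slabs()   # every symmetrically distinct termination
default_slab = gen.get_slab()        # single termination at shift = 0
print(len(all_terminations), default_slab.shift, default_slab.surface_area)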