def save_fig(fig, plot_title, save_mode=None, folder=None):
    """Interactively save *fig* as an SVG file and return the chosen name.

    save_mode: "s…" prompts for a file name, "d…" builds a default name
    from *plot_title*; anything else skips saving. Prompted for when None.
    folder: target directory; defaults to <local_cluster_dir>/figures.
    Side effect: the process chdirs into *folder*.
    Returns the file name (without extension) or None when nothing saved.
    """
    if save_mode is None:
        save_mode = input(
            "Type [s]ave to save {0} ([d]efault name) \n".format(plot_title))
    if folder is None:
        folder = os.path.join(platform_id.local_cluster_dir(), "figures")
    os.chdir(folder)
    print("in", os.getcwd())

    chosen_name = None
    try:
        # empty answer -> IndexError -> figure not saved
        mode_key = save_mode[0]
    except IndexError:
        print("option not recognized, figure not saved")
    else:
        if mode_key == "s":
            chosen_name = input("Type the file name (SVG format) : ")
        elif mode_key == "d":
            chosen_name = platform_id.get_file_name(
                folder, plot_title.replace(' ', '_'), ext=".svg")
            print("default name : {}".format(chosen_name))
            # print(os.getcwd(), param['mainFolder'])

    if chosen_name is not None:
        fig.savefig("{}.svg".format(chosen_name), bbox_inches='tight')
        print("{} saved in {}".format(chosen_name.split("/")[-1], folder))
    return chosen_name
def make_COOP_dir(folder):
    """Create a fresh COOP subfolder of *folder* and seed it with the
    VASP inputs of the parent run (CONTCAR becomes the new POSCAR).

    Returns the path of the created COOP directory.
    """
    coop_dir = platform_id.get_file_name(folder, "{}/COOP".format(folder))
    os.mkdir(coop_dir)
    # (source file in parent run, destination name in the COOP folder)
    file_pairs = [("INCAR", "INCAR"),
                  ("KPOINTS", "KPOINTS"),
                  ("CONTCAR", "POSCAR"),
                  ("POTCAR", "POTCAR")]
    for src_name, dest_name in file_pairs:
        shutil.copy2("{}/{}".format(folder, src_name),
                     "{}/{}".format(coop_dir, dest_name))
    return coop_dir
def initialize(working_dir):
    """Populate the global PARAM dict for a run rooted at *working_dir*
    and start file logging there.

    Returns the PARAM dict (same object as the module-level global).
    """
    global PARAM
    # Check if we are on frodon (the cluster)
    PARAM["run_on_cluster"] = platform_id.first_check_cluster()
    # path to the "project folder"
    # (i.e. the common parent directory of all the jobs to plot)
    PARAM['mainFolder'] = working_dir
    print("\nMain directory : \n{0} \n".format(PARAM['mainFolder']))
    print(
        "\n preparing the run with the following parameters :\n{} \n\n".format(
            PARAM))
    log_file_name = platform_id.get_file_name(PARAM['mainFolder'], "log")
    logging.basicConfig(filename=log_file_name, level=logging.DEBUG)
    logging.info('Started')
    return PARAM
def generate_job_folders(final_job_list, parent_folder,
                         project_name, selective_dynamic=None):
    "Search and create a new parent dir and individual jobs dirs"
    project_dir = platform_id.get_file_name(
        parent_folder, project_name, ext="")
    print(project_dir)
    os.mkdir(project_dir)
    created_folders = []
    # any value other than None / "False" enables selective dynamics
    if selective_dynamic not in (None, "False"):
        make_selective_dynamic(final_job_list, selective_dynamic)
    for job in final_job_list:
        job.set_job_folder(project_dir)
        job.write_data_input()
        print("set written in {}".format(job.job_folder))
    # NOTE(review): nothing is ever appended, so this is always empty —
    # callers appear to ignore the return value
    return created_folders
def multi_distortion_desodiation_scan():
    """takes a list of structures with Mg
    remove Na using iterative madelung algorithm
    create a trigonal-prismatic distortion on all MnO6 octa
    Return a list of all desodiated & distorted structures
    """
    # hard-coded switch between cluster and local backup paths below
    cluster = True
    # NOTE(review): incar_static is built here but never used in this
    # function body — presumably consumed elsewhere or dead code; verify.
    incar_static = default_incar()
    incar_static.update({
        'ENCUT': 500,
        'PREC': 'Normal',
        'EDIFF': 1e-04,
        'NELMDL': -15,
        'NELM': 400,
        # DOS file grid (odd number so the Fermi level sits on a point)
        'NEDOS': 1001,
        # -5 (tetrahedron) for insulators (and sigma=0)
        'ISMEAR': 0,
        # smearing parameter (to converge the entropy)
        'SIGMA': 0.05,
        'IBRION': 2,
        'NSW': 300,  # maximum number of ionic relaxation steps
        # relax only the sodium positions, not the cell
        'ISIF': 0,
        'LCHARG': 'FALSE',
        'LAECHG': 'FALSE',  # charge file output
        'LASPH': '.TRUE.',  # non isotropic d orbitals
        'ISYM': 0,  # = 1 keeps symmetry
    })
    if cluster:
        parent_folder = "/home/jvergnet/frofro/honeycomb/"
    else:
        parent_folder = "/mnt/fbf83589-c1e0-460b-86cc-ec3cc4a64545/backup_cluster/backup_fro/"
    project_name = "prismatic_zoom"
    cif_folder = "/home/jvergnet/frofro/honeycomb/ref_POSCAR_MgMn/"
    cif_list = ["SYM_P2EEL_POSCAR"]
    # get the O2 /P2 / O3 with Mg
    pristine_stackings = get_pristine_list(
        cif_list, cif_folder, is_poscar=True)
    # [ {'structure' : O2 , "id" : "O2 } ]
    print(pristine_stackings)
    project_dir = platform_id.get_file_name(
        parent_folder, project_name, ext="")
    os.mkdir(project_dir)
    os.chdir(project_dir)
    for specie_stacking in pristine_stackings:
        # stacking_dir = projectDir+"/"+specie_stacking['id']
        # print(stacking_dir)
        # os.mkdir(stacking_dir)
        # os.chdir(stacking_dir)
        # all_structures = []
        ordered_stackings = [{'structure': specie_stacking['structure'],
                              'id': specie_stacking['id']}]
        # get_complete_structure_list([specie_stacking], launch_choice='s')

        # desodiate in place before generating the distorted variants
        for struct_stacking in ordered_stackings:
            struct_stacking['structure'].remove_species(["Na"])
        print([s["structure"] for s in ordered_stackings])

        # fan the structure-list generation out over all CPU cores
        with Pool(processes=cpu_count()) as parrallel_threads:
            list_of_struct_list = \
                parrallel_threads.starmap(
                    get_structure_list_wrapper,
                    [([struct_stacking], 'b')
                     for struct_stacking in ordered_stackings])
            parrallel_threads.close()
            parrallel_threads.join()
        # flatten the per-stacking lists into a single job list
        final_job_list = list(chain(*list_of_struct_list))

        # for desodiated_structure in desodiated_stackings :
        #     print(desodiated_structure["id"])
        #     # desodiate each stacking and write create folders for each state
        #     all_structures += get_complete_structure_list(\
        #             [desodiated_structure],launch_choice='b')
        #
        #     for prismatic_disto_struct in prismatic_distorted_structures :
        #         print(prismatic_disto_struct["id"])
        #         all_structures += get_complete_structure_list(\
        #                 [prismatic_disto_struct],launch_choice='t')
        # , name_format="sulfide")

        adjust_incar(final_job_list, incar_setting={},
                     v_d_w=None, perturb=None)
        generate_job_folders(final_job_list, project_dir,
                             specie_stacking['id'],
                             selective_dynamic=True)
        # come back to the project root before the next stacking
        os.chdir(project_dir)
    return "OK"
def plot_charge_and_mag(rundict_list, detailled=None, **kwargs):
    """plot charge and magmom for each specie of a starting structure
    then save them with proper name

    rundict_list : runs to plot (need .structure / .structure_data).
    detailled : verbosity level 0-3; prompted for interactively when None.
    kwargs : optional "coord" x-axis key for the evolution plots
             (defaults to "x_na").
    Returns True.
    """
    print("\n==== ELECTRONS IN REAL SPACE (BADER) ======\n")
    coord = kwargs.get("coord", "x_na")
    print(rundict_list[0].structure_data)
    if detailled is None:
        detailled = 0
        try:
            print("""
            plotting charge and magmom
            0 = total charge only
            1 = site charges
            2 = site chg and mag,
            3 = chg & mag volumes
            ( < 0 to pass)\n """)
            # FIX: catch only bad/absent numeric input; the original bare
            # `except BaseException` also swallowed KeyboardInterrupt.
            detailled = int(input("Level of verbosity ? : "))
        except (ValueError, EOFError):
            print("default to 0")

    figures = {}
    # all chemical elements present across the runs
    elements = set()
    for run in rundict_list:
        elements.update(
            set(run.structure.composition.get_el_amt_dict().keys()))

    if detailled == 0:
        for specie in elements:
            fig_name = "total_charge_on_{}".format(specie)
            figures[fig_name] = plt.figure(fig_name)
            axe = figures[fig_name].add_subplot(1, 1, 1)
            g_plot.plot_site_value_evolution(
                rundict_list, specie, value="charge", coord=coord,
                plot_type=[4], axe0=axe)
            # plot_type = 0:sites 1:avg 2:min 3:max 4:sum/nbCell

    if detailled > 0:
        prop_list = ["charge"]
        if detailled > 1:
            prop_list.append('magnetization')
        if detailled > 2:
            prop_list += ['vol_chg', 'vol_mag']
        for specie in elements:
            for prop in prop_list:
                figures["{}_of_{}".format(prop, specie)] = \
                    g_plot.plot_site_value_evolution(
                        rundict_list, specie, value=prop, coord=coord,
                        plot_type=[0, 1])
                # plot_type = 0:sites 1:avg 2:min 3:max 4:sum

    # optional interactive save of every generated figure
    try:
        print('Type[s]ave to save all bader figures \n')
        if input(": ") == "s":
            prefix = input(" prefix ? : ")
            for f_name, figure in figures.items():
                current_fig_name = platform_id.get_file_name(
                    os.getcwd(), "{}_{}".format(prefix, f_name), ext=".svg")
                figure.savefig(
                    "{}.svg".format(current_fig_name), bbox_inches='tight')
    except Exception as exc:
        # FIX: report the failure instead of silently swallowing it
        # (original did `except Exception: pass`); still best-effort.
        print("bader figures not saved :", exc)
    return True
def main():
    """main function : read runs in folder & rerun them according to user input

    Workflow: prompt for rerun/INCAR type, collect valid runs from the
    chosen directory tree, filter converged vs unconverged, then build a
    Job per selected run with adjusted INCAR/KPOINTS, write the new input
    folders and copy any needed files from the old run.
    """
    # working directory: first CLI argument, else the current folder
    try:
        main_dir = sys.argv[1]
    except IndexError:
        main_dir = os.getcwd()
        print("in current folder : {}".format(main_dir))

    rerun_type, incar_type = prompt_rerun_type()
    select_converged_runs = input(
        "Rerun [a]ll / [n]on-converged only / [c] converged-only ? : ")
    # do not parse vasprun if all job selected
    check_vasprun = 0 if select_converged_runs in ["a"] else 0.9

    # layout of the runs on disk (defaults to project on bad/empty input)
    try:
        file_system = input("[j]ob / [p]roject / [s]uper_project ? : ")[0]
    except Exception:
        file_system = "p"
    print("filesystem : {}".format(file_system))

    # Create a list of all the valid runs in the selected folders
    run_list = read.collect_valid_runs(main_dir, checkDiff=False,
                                       vasprun_parsing_lvl=check_vasprun,
                                       file_system_choice=file_system)

    # status == 3 marks a converged run (set by the read module — verify)
    converged_jobs = [d for d in run_list if d.status == 3]
    unconverged_jobs = [d for d in run_list if d.status < 3]
    # when reruning all jobs, they are all considered as unconverged
    if select_converged_runs in ["a", "n"]:
        rerun_list = unconverged_jobs
    elif select_converged_runs in ["c"]:
        rerun_list = converged_jobs
    # NOTE(review): rerun_list is unbound (NameError) when the answer
    # above is not one of "a" / "n" / "c"
    rerun_list = filtering_runs(select_converged_runs, rerun_list)

    print("number of valid jobs to rerun : {}".format(len(rerun_list)))
    if len(rerun_list) == 0:
        print("no valid run")
        return 0
    print("selected runs : \n {}".format(
        [print(rundict.str_id) for rundict in rerun_list]))

    # optional displacement of the initial atom positions (Angstrom)
    try:
        perturb = float(
            input('Perturb the initial position of atoms ? in Angstrom '))
    except Exception as ex:
        print("No perturbation")
        perturb = 0

    # name of the subfolder that will hold the reruns
    dirname = incar_type if incar_type is not None else rerun_type
    print("current dirname ={}".format(dirname))
    if incar_type == "fukui":
        # electrons added (>0) or removed (<0) for the fukui calculation
        fukui_nelec = float(
            input("nb elec for the fukui (>0: added, <0 : removed) ? "))
        print("fukui electrons : {}".format(fukui_nelec))
    elif rerun_type == "custom":
        try:
            tmpdir = str(input("Custom directory name ? :"))
            if len(tmpdir) > 0:
                dirname = tmpdir
            print(dirname)
        except Exception:
            print("error, default dirname to {}".format(dirname))

    # resolve where the rerun folders will be created
    if file_system in ["p", "s"]:
        dirname_path = platform_id.get_file_name(main_dir, dirname)
    elif file_system in ["j"]:
        dirname_path = rerun_list[0].job_folder

    for rundict in rerun_list:
        # create Job from a RunDict
        job = launch.Job.from_rundict(rundict)
        files_to_copy = []
        incar = {}
        if rerun_type == "identical":
            files_to_copy += ['INCAR', 'POTCAR', 'KPOINTS', 'CONTCAR']
            # quick and dirty copy
            print("identical set generated")

        if rerun_type == "relaxation":
            if incar_type == "poscar_only":
                pass
            elif incar_type == "rebuild_from_scratch":
                pass
            elif incar_type == "less_precise":
                incar.update(less_precise_incar(job.structure))
                print(" less precise set generated")
            elif incar_type == "more_precise":
                incar.update(more_precise_incar(job.structure))
                print("more precise set generated")
            elif incar_type == "ultra_precise":
                incar.update(ultra_precise_incar())
                print("ultra precise set generated")
        elif rerun_type == "custom":
            # incar['LDAUU']={'O': 6}
            # incar["NCORE"] = 8
            # incar["KPAR"] = 2
            # incar["NUPDOWN"] = 0
            # HSE06
            # incar["NSW"] = 0
            # incar['LHFCALC'] = "TRUE"
            # incar['HFSCREEN'] = 0.2
            # paramagnetic
            incar.update({
                "ISPIN": 1
            })
            # anti-ferro-magnetic
            # NOTE(review): identical to the update above — the second
            # call is redundant; looks like a placeholder to edit by hand
            incar.update({
                "ISPIN": 1
            })
            # kpt = Kpoints.gamma_automatic(kpts=(1, 1, 1), shift=(0, 0, 0))
            # job.user_kpoint = kpt
            print("yolo!!")
            print("MODIFIED PARAMETERS ========", incar)
            print("{} set generated".format(dirname))
        elif rerun_type == "single_point":
            if incar_type == "fukui":
                # start from the previous run's INCAR, only shift NELECT
                input_set = MITRelaxSet(job.structure)
                incar = rundict.parameters["incar"]
                incar["NELECT"] = input_set.nelect + fukui_nelec
                incar["NSW"] = 0
                print("fukui correction added :",
                      "\nNELECT read {} ==> wrote {}".format(
                          input_set.nelect,
                          input_set.nelect + fukui_nelec))
            elif incar_type == "parcharg":
                # partial charge density in an energy window around the
                # Fermi level; needs the previous WAVECAR
                efermi = rundict.data['efermi']
                print(efermi)
                incar["LPARD"] = "True"
                below_fermi = float(input("Emin (Efermi=0) ?"))
                above_fermi = \
                    float(input("Emax (Efermi=0) ?"))
                incar["EINT"] = "{} {}".format(efermi+below_fermi,
                                               efermi+above_fermi)
                dirname += "_{}_{}".format(below_fermi, above_fermi)
                files_to_copy.append("WAVECAR")
            elif incar_type in ["static", "DOS"]:
                incar.update(single_point_incar())
                if incar_type == "DOS":
                    incar['EMIN'] = -5
                    incar['EMAX'] = 5
                    incar["NEDOS"] = 2001
                    # folder = prev_folder + "/DOS"
                    # os.mkdir(folder)
                    kpt_settings = {'reciprocal_density': 1000}
                else:
                    kpt_settings = {'reciprocal_density': 300}
                job.user_kpoint = kpt_settings
            elif incar_type == "non_SCF":
                # band structure on a fixed charge density (ICHARG=11)
                files_to_copy += ["CHGCAR", "CHG", "linear_KPOINTS"]
                incar.update({"IBRION": -1,
                              "LCHARG": False,
                              "LORBIT": 11,
                              "LWAVE": False,
                              "NSW": 0,
                              "ISYM": 0,
                              "ICHARG": 11,
                              "ISMEAR": 0,
                              "SIGMA": 0.01
                              })
                for k in ["NELMDL", "MAGMOM"]:
                    job.user_incar.pop(k, None)
                # job.set_job_folder(rerun_dir)
                kpt = drawkpt(rundict.structure)
                kpt.write_file(os.path.join(
                    job.old_folder, "linear_KPOINTS"))

        # remember where the previous run lives, then point the job at
        # its new folder according to the filesystem layout
        job.old_folder = job.job_folder
        if file_system == "j":
            job.set_job_folder(platform_id.get_file_name(dirname_path,
                                                         dirname),
                               explicit_jobpath=True)
        else:
            if file_system == "p":
                job.set_job_folder(dirname_path, explicit_jobpath=False)
            if file_system == "s":
                # print(rundict.stacking)
                job.set_job_folder(os.path.join(dirname_path,
                                                rundict.stacking),
                                   explicit_jobpath=False)

        # normalize EDIFF to scientific notation for the INCAR file
        if incar.get('EDIFF', None) is not None:
            incar['EDIFF'] = '{:0.1E}'.format(incar['EDIFF'])
        # drop stale per-run tags before merging in the new settings
        for k in ["MAGMOM", "EINT", "LPARD", "SIGMA"]:
            job.user_incar.pop(k, None)
        job.user_incar.update(incar)
        print("INCAR \n", incar)
        print("JOB INCAR\n", job.user_incar)
        job.structure.perturb(perturb)
        print("explicit jobpath", job.explicit_jobpath)
        if rerun_type == "identical":
            # like "mkdir -p" (create parent if necessary)
            os.makedirs(job.job_folder)
        else:
            job.write_data_input()
        # best-effort copy of the requested files from the old run
        for f_name in files_to_copy:
            try:
                shutil.copy2('{0.old_folder}/{1}'.format(job, f_name),
                             '{0.job_folder}/{1}'.format(job, f_name))
            except Exception as ex:
                print("error when copying", f_name, ex)

    # optional cleanup: delete every unconverged run folder
    if input("[r]emove unconverged folders ? ") == "r":
        for rundict in unconverged_jobs:
            unconv_dir = rundict.old_folder
            # if input("remove {0} ? Y / N ".format(unconv_dir))=="Y" :
            shutil.rmtree(unconv_dir)
            print("{} deleted ".format(unconv_dir))