def parse_job_state(path):
    """Return the cluster-reported state of the job located at *path*.

    Returns:
        dict: ``{"job_state": <state>}`` as reported by the compute cluster.
    """
    compenv_cluster = ComputerCluster()
    state_i = compenv_cluster.cluster.job_state(path_i=path)
    return {"job_state": state_i}
def submit_job(
    wall_time_i=None,
    nodes_i=None,
    job_0_dir_i=None,
    ):
    """Submit the job in *job_0_dir_i* with cluster-appropriate parameters.

    The submission parameter dict is chosen from the COMPENV environment
    variable: "sherlock", "slac", or (default) a NERSC/knl-style setup.

    Args:
        wall_time_i: Wall time to request (units per cluster convention).
        nodes_i: Node count (used on sherlock and the default cluster).
        job_0_dir_i: Path to the job directory to submit.
    """
    CC = ComputerCluster()

    # Read COMPENV once; .get avoids a KeyError when the variable is unset
    # (unset falls through to the default/NERSC branch).
    compenv = os.environ.get("COMPENV", "")

    if compenv == "sherlock":
        def_params = {
            "wall_time": wall_time_i,
            "nodes": nodes_i,
            "path_i": job_0_dir_i,
            }
    elif compenv == "slac":
        def_params = {
            "wall_time": wall_time_i,
            "cpus": 12,
            "queue": "suncat2",
            "path_i": job_0_dir_i,
            }
    else:
        def_params = {
            "wall_time": wall_time_i,
            # "queue": "premium",
            "queue": "regular",
            "architecture": "knl",
            "nodes": nodes_i,
            "path_i": job_0_dir_i,
            }

    CC.submit_job(**def_params)
def parse_job_state(path):
    """Query the compute cluster for the job state at *path*.

    NOTE(review): this duplicates an earlier ``parse_job_state`` definition
    in this file; the later definition wins at import time — confirm intent.
    """
    #| - parse_job_state
    cluster_inst = ComputerCluster()
    return {"job_state": cluster_inst.cluster.job_state(path_i=path)}
    #__|
def __init__(self,
    tree_level=None,
    level_entries=None,
    indiv_dir_lst=None,
    indiv_job_lst=None,
    skip_dirs_lst=None,
    root_dir=".",
    working_dir=".",
    # root_dir=".",
    folders_exist=None,
    ):
    """Initialize DFT_Jobs_Setup Instance.

    Builds the job directory tree description, resolves root/working
    directories, attaches a ComputerCluster instance, and materializes the
    job list and its data table.

    Args:
        tree_level: Labels for the levels of the job directory tree.
        level_entries: Entries for each tree level (stored twice below; see
            NOTE).
        indiv_dir_lst: List of individual job directories to include.
        indiv_job_lst: List of dictionaries representing jobs
        skip_dirs_lst: Directories to exclude from the tree.
        root_dir: Base directory (resolved via __set_root_dir__).
        working_dir: Working directory (resolved via __set_working_dir__).
        folders_exist: Whether job folders already exist on disk; passed to
            __folders_exist__ which derives the final value.
    """
    #| - __init__

    #| - Initializing Some Class Attributes
    # Populated later by other methods; start unset.
    self.order_dict = None
    self.job_var_lst = None
    self.Job_list = []

    # Separator used when composing level/path strings.
    self.sep = "-"

    self.level_entries = level_entries
    self.tree_level_labels = tree_level
    # NOTE(review): level_entries is stored under two attribute names
    # (level_entries and level_entries_list) — confirm both are required.
    self.level_entries_list = level_entries
    self.skip_dirs_lst = skip_dirs_lst
    self.indiv_dir_lst = indiv_dir_lst
    self.indiv_job_lst = indiv_job_lst
    #__|

    # Order matters below: directory resolution -> cluster -> job
    # attributes -> directory structure -> job list -> data table.
    self.root_dir = self.__set_root_dir__(root_dir)
    self.working_dir = self.__set_working_dir__(working_dir)
    self.cluster = ComputerCluster()
    self.jobs_att = self.__load_jobs_attributes__()
    self.__create_jobs_bin__()
    self.folders_exist = self.__folders_exist__(folders_exist)
    self.load_dir_struct()
    self.__create_dir_structure_file__()
    self.num_jobs = self.__number_of_jobs__()
    self.__Job_list__()
    self.data_frame = self.__gen_datatable__()

    # if self.folders_exist:
    #     # self.data_frame = self.__generate_data_table__()

    self.check_inputs()
def set_up__submit__new_job(
    latest_revision,
    new_job_file_dict,
    run_calc=False,
    ):
    """Create the next revision directory for a job and optionally submit it.

    Copies the given files into a new ``_<revision+1>`` directory next to the
    previous revision, derives wall time and DFT parallelization parameters
    from the structure size and compute environment, writes
    ``dft-params.json``, and (if *run_calc*) submits via ComputerCluster.

    Args:
        latest_revision (dict): Info on the most recent revision; keys read:
            "path", "pre_path", "revision", "num_steps", "job_state",
            "timed_out", "isif".
        new_job_file_dict (dict): Maps source file path -> destination file
            name inside the new revision directory.
        run_calc (bool): If True, actually submit the job.
    """
    # | - set_up__submit__new_job
    path = latest_revision["path"]
    pre_path = latest_revision["pre_path"]
    current_revision = latest_revision["revision"]
    num_prev_steps = latest_revision["num_steps"]
    job_state = latest_revision["job_state"]
    timed_out = latest_revision["timed_out"]
    isif = latest_revision["isif"]

    new_revision = current_revision + 1
    job_0_dir_i = os.path.join(pre_path, "_" + str(new_revision))

    # Replaces a bare `try/except: pass`; only "already exists" is tolerated,
    # other OS errors (permissions, bad path) now surface instead of being
    # silently swallowed.
    os.makedirs(job_0_dir_i, exist_ok=True)

    # | - Copy files into new directory
    for file_path_i, dist_name in new_job_file_dict.items():
        copyfile(file_path_i, os.path.join(job_0_dir_i, dist_name))
    #__|

    dft_params_dict = {
        # "encut": 600,
        # "kpar": 5,
        # "ediffg": 5e-3,
        # "ediff": 1e-6
        }

    if os.environ["COMPENV"] == "sherlock":
        print("iksfijsijfisddfi8998y0934389 | TEMP")
        print("In Sherlock")
        dft_params_dict["npar"] = 4

    # Read the structure that was just copied into the new revision dir.
    atoms_i = io.read(os.path.join(job_0_dir_i, "init.cif"))

    # BUGFIX: the original tested `< 10` before `< 4`, making the `< 4`
    # branch (factor 3) unreachable; check the narrower range first.
    if num_prev_steps < 4:
        wall_time_factor = 3
    elif num_prev_steps < 10:
        wall_time_factor = 2.5
    else:
        wall_time_factor = 1.8

    num_atoms = atoms_i.get_number_of_atoms()
    wall_time_i = calc_wall_time(num_atoms, factor=wall_time_factor)
    wall_time_i = int(wall_time_i)

    if os.environ["COMPENV"] != "slac":
        if wall_time_i > 600:
            wall_time_i = 600
    else:
        # SLAC wall time uses a different unit/scale — TODO confirm the 8x
        # conversion factor and the 2760 cap.
        wall_time_i = 8. * wall_time_i
        if wall_time_i > 2760:
            wall_time_i = 2760

    # Larger systems get more nodes and matching k-point parallelization.
    if num_atoms > 100:
        nodes_i = 10
        dft_params_dict["kpar"] = 10
    else:
        nodes_i = 5
        dft_params_dict["kpar"] = 5

    if os.environ["COMPENV"] == "slac":
        dft_params_dict["kpar"] = 3
        dft_params_dict["npar"] = 4

    # Write dft parameters json file to job dir
    with open(os.path.join(job_0_dir_i, "dft-params.json"), "w+") as fle:
        json.dump(dft_params_dict, fle, indent=2, skipkeys=True)

    # Submit job ##############################################################
    CC = ComputerCluster()
    wall_time_i = int(wall_time_i)

    # Default (NERSC/knl-style) submission parameters.
    def_params = {
        "wall_time": wall_time_i,
        # "queue": "premium",
        "queue": "regular",
        "architecture": "knl",
        "nodes": nodes_i,
        "priority": "scavenger",
        "path_i": job_0_dir_i}

    if os.environ["COMPENV"] == "sherlock":
        print("Found SHERLOCK env var")
        def_params = {
            "wall_time": wall_time_i,
            "nodes": nodes_i,
            "queue": "iric",
            "path_i": job_0_dir_i}
    elif os.environ["COMPENV"] == "slac":
        print("Found SLAC env var")
        def_params = {
            "wall_time": wall_time_i,
            "cpus": 12,
            "queue": "suncat2",
            "path_i": job_0_dir_i}

    if run_calc:
        CC.submit_job(**def_params)
    #__|
def test_check(self):
    """Attempts to set params based on other dependent parameters.

    Ex.) If spinpol == True, then sigma should be smaller, 0.01ish
    (Charlotte told me)

    Only set this dependent param if the user has not explicitly done so,
    is what the condition for setting a parameter automatically is.
    """
    # | - test_check

    # | - Setting dw to 10 * pw
    # Default the charge-density cutoff to 10x the wavefunction cutoff
    # unless the user set "dw" explicitly (mod_dict tracks user updates).
    if self.mod_dict["dw"] is False:
        dw_i = 10. * self.params["pw"]
        self.update_params({"dw": dw_i}, user_update=False)
    # __|

    # | - Decreasing Sigma for Spin Polarization Calculations
    # Smaller smearing for spin-polarized runs (see docstring).
    if self.mod_dict["sigma"] is False:
        if self.params["spinpol"] is True:
            sigma_i = 0.02
            self.update_params({"sigma": sigma_i}, user_update=False)
    # __|

    # | - BEEF Ensemble of Energies =========================================

    # | - Removing Beef-Ensemble if XC-Functional Not BEEF
    # The ensemble only makes sense with a BEEF-family functional; turn
    # both ensemble flags off otherwise.
    xc_list = self.params["xc"]
    if "beefensemble" in self.params:
        if self.params["beefensemble"] is True and "BEEF" not in xc_list:
            print("Functional not compatible with BEEF-ensemble method")
            self.update_params({"beefensemble": False}, user_update=False)
            self.update_params({"printensemble": False}, user_update=False)
    else:
        pass
    # __|

    # | - Turn on printensemble Parameter for BEEF Ensemble of Energies
    xc_list = self.params["xc"]
    if "beefensemble" in self.params:
        if self.params["beefensemble"] is True and "BEEF" in xc_list:
            print("Espresso_Params | "
                "test_check | Turning on printensemble")
            self.update_params({"printensemble": True}, user_update=False)
    else:
        pass
    # __|

    # | - Turn off BEEF on AWS
    # NOTE This is new (180412 - RF), check that it works
    CC = ComputerCluster()
    if CC.cluster_sys == "aws":
        if "beefensemble" in self.params:
            if self.params["beefensemble"] is True:
                print(
                    "Espresso_Params | "
                    "test_check | Attempting to use BEEF ensemble on AWS, "
                    "which doesn't support it at this time, "
                    "BEEF ensemble tags will be turned off")
                self.update_params(
                    {"beefensemble": False},
                    user_update=False,
                    )
                self.update_params(
                    {"printensemble": False},
                    user_update=False,
                    )
    # __|

    # __| ==================================================================

    # | - Parallelization
    # Derive QE's -npool flag from the submitted node count when the user
    # has not set "parflags" themselves.
    if self.mod_dict["parflags"] is False:
        if type(self.submission_params) == dict:
            if "nodes" in self.submission_params:
                num_nodes = self.submission_params["nodes"]
                self.update_params(
                    {"parflags": "-npool " + str(int(num_nodes))},
                    user_update=False,
                    )

                # TEMP_PRINT
                print("098sddfkfs--s-s-__-_")
                print("-npool " + str(int(num_nodes)))
    # __|
def submit_job(
    # wall_time_i=None,
    nodes_i=None,
    path_i=None,
    num_atoms=None,
    wall_time_factor=1.0,
    queue=None,
    ):
    """Submit the job at *path_i* with environment-specific parameters.

    Wall time is derived from the atom count; the submission dict depends on
    the COMPENV environment variable (nersc / sherlock / slac / default).

    Args:
        nodes_i: Requested node count (currently unused in the nersc and
            sherlock branches, which choose their own node counts).
        path_i: Job directory to submit.
        num_atoms: Atom count used to estimate the wall time.
        wall_time_factor: Multiplier applied to the estimated wall time.
        queue: SLAC queue name ("suncat", "suncat2", or "suncat3").

    Raises:
        ValueError: On SLAC when *queue* is not one of the known queues.
    """
    #| - submit_job

    #| - Wall time
    wall_time_i = calc_wall_time(num_atoms=num_atoms, num_scf=50)
    print("wall_time_i:", wall_time_i)
    wall_time_i = wall_time_factor * wall_time_i
    print("wall_time_i:", wall_time_i)
    #__|

    CC = ComputerCluster()
    compenv = os.environ.get("COMPENV", "")

    if compenv == "nersc":
        #| - nersc
        # Default to 10 nodes, but drop to 1 unless dft-params.json requests
        # the high-parallelization combo (kpar=10, npar=6).
        nodes_i = 10

        dft_params_file = os.path.join(path_i, "dft-params.json")
        my_file = Path(dft_params_file)
        if my_file.is_file():
            with open(dft_params_file, "r") as fle:
                dft_calc_settings = json.load(fle)

            kpar_i = dft_calc_settings.get("kpar", None)
            npar_i = dft_calc_settings.get("npar", None)
            if kpar_i == 10 and npar_i == 6:
                nodes_i = 10
            else:
                nodes_i = 1

        def_params = {
            "wall_time": wall_time_i,
            # "queue": "premium",
            "queue": "regular",
            "architecture": "knl",
            "nodes": nodes_i,
            "path_i": path_i}
        #__|

    elif compenv == "sherlock":
        #| - sherlock
        def_params = {
            "wall_time": wall_time_i,
            # "nodes": nodes_i,
            "nodes": 1,
            "path_i": path_i}
        #__|

    elif compenv == "slac":
        #| - slac
        print("queue:", queue)

        # BUGFIX: an unrecognized queue previously left cpus_i unassigned
        # and crashed later with NameError; fail fast with a clear message.
        queue_cpus = {"suncat": 8, "suncat2": 12, "suncat3": 16}
        if queue not in queue_cpus:
            raise ValueError(
                "Unrecognized SLAC queue: " + str(queue))
        cpus_i = queue_cpus[queue]

        def_params = {
            "wall_time": wall_time_i,
            "cpus": cpus_i,
            "queue": queue,
            "path_i": path_i}
        #__|

    else:
        def_params = {
            "wall_time": wall_time_i,
            # "queue": "premium",
            "queue": "regular",
            # "architecture": "knl",
            "nodes": 1,
            "path_i": path_i}

    CC.submit_job(**def_params)
    #__|