def run_phase(self):
    """Run the case once, then exercise incremental short-term archiving.

    Repeatedly calls ``case_st_archive`` with a series of increasing
    ``last_date_str`` cutoffs and, after each pass, checks that the
    ``rest`` archive directory contains exactly the daily restart dates
    at or before the cutoff — no dates missing, none beyond the cutoff.
    """
    # Start from a clean archive root so stale files from a previous
    # run cannot mask missing-archive failures.
    archive_dir = self._case.get_value('DOUT_S_ROOT')
    if os.path.isdir(archive_dir):
        shutil.rmtree(archive_dir)

    self.run_indv()

    # finished running, so all archive files should exist
    start_date = _date_to_datetime(get_file_date(self._case.get_value('RUN_STARTDATE')))
    rest_dir = os.path.join(archive_dir, 'rest')

    delta_day = datetime.timedelta(1)
    # First restart is expected one day after the start date.
    current_date = start_date + delta_day
    # next_datecheck tracks the oldest date not yet verified, so each
    # archived date is checked exactly once across loop iterations.
    next_datecheck = current_date
    days_left = self._case.get_value('STOP_N')
    final_date = start_date + delta_day * days_left
    while current_date < final_date:
        logger.info('Testing archiving with last date: {}'.format(current_date))
        current_date_str = '{:04}-{:02}-{:02}'.format(current_date.year,
                                                      current_date.month,
                                                      current_date.day)
        self._case.case_st_archive(last_date_str=current_date_str, copy_only=False)
        # Dates are parsed from the archived file names under rest/.
        archive_dates = [_date_to_datetime(get_file_date(fname))
                         for fname in glob.glob(os.path.join(rest_dir, '*'))]
        # Every daily date up to the cutoff must have been archived.
        while next_datecheck <= current_date:
            expect(next_datecheck in archive_dates,
                   'Not all dates generated and/or archived: ' +
                   '{} is missing'.format(next_datecheck))
            next_datecheck += delta_day
        # Nothing newer than the cutoff may appear in the archive.
        for date in archive_dates:
            expect(date <= current_date,
                   'Archived date greater than specified by last-date: ' +
                   '{}'.format(date))
        # Advance the cutoff by a random 1-3 day step (capped by days left)
        # so different archive batch sizes get exercised.
        num_days = random.randint(1, min(3, days_left))
        days_left -= num_days
        current_date += num_days * delta_day
def _get_datenames(case):
###############################################################################
    """
    Return the date objects for each coupler restart file in the run directory.

    Note we are assuming that the coupler restart files exist and are
    consistent with other component datenames.

    Not doc-testable due to filesystem dependence
    """
    casename = case.get_value("CASE")
    rundir = case.get_value("RUNDIR")
    expect(isdir(rundir), 'Cannot open directory {} '.format(rundir))

    # The MULTI_DRIVER option requires a more careful search for dates.
    # When True, each date has NINST cpl_####.r files.
    multidriver = case.get_value('MULTI_DRIVER')
    logger.debug("_get_datenames: multidriver = {} ".format(multidriver))

    # With multiple drivers only the first instance's coupler restart
    # files are consulted; otherwise the plain cpl naming is used.
    cpl_tag = '.cpl_0001.r.*.nc' if multidriver else '.cpl.r.*.nc'
    files = sorted(glob.glob(os.path.join(rundir, casename + cpl_tag)))
    logger.debug(" cpl files : {} ".format(files))

    if not files:
        expect(False,
               'Cannot find a {}.cpl*.r.*.nc file in directory {} '.format(casename, rundir))

    return [get_file_date(filename) for filename in files]
def case_st_archive(self, last_date_str=None, archive_incomplete_logs=True, copy_only=False, resubmit=True):
###############################################################################
    """
    Create archive object and perform short term archiving.

    Args:
        last_date_str: optional date string; only files dated at or before
            this date are archived (parsed via get_file_date).
        archive_incomplete_logs: forwarded to _archive_process.
        copy_only: forwarded to _archive_process (copy instead of move).
        resubmit: when True and RESUBMIT > 0, resubmit the case afterwards.

    Returns True on completion.
    """
    caseroot = self.get_value("CASEROOT")
    self.load_env(job="case.st_archive")
    if last_date_str is not None:
        try:
            last_date = get_file_date(last_date_str)
        except ValueError:
            expect(False, 'Could not parse the last date to archive')
    else:
        last_date = None

    dout_s_root = self.get_value('DOUT_S_ROOT')
    if dout_s_root is None or dout_s_root == 'UNSET':
        # FIX: corrected "achiver" typo in the error message.
        expect(False,
               'XML variable DOUT_S_ROOT is required for short-term archiver')
    if not isdir(dout_s_root):
        os.makedirs(dout_s_root)

    dout_s_save_interim = self.get_value('DOUT_S_SAVE_INTERIM_RESTART_FILES')
    if dout_s_save_interim == 'FALSE' or dout_s_save_interim == 'UNSET':
        rest_n = self.get_value('REST_N')
        stop_n = self.get_value('STOP_N')
        if rest_n < stop_n:
            # FIX: the two adjacent literals previously concatenated with no
            # separator, producing "...will be savedinterim restart files...".
            logger.warning('Restart files from end of run will be saved, '
                           'interim restart files will be deleted')

    logger.info("st_archive starting")

    archive = self.get_env('archive')
    functor = lambda: _archive_process(self, archive, last_date, archive_incomplete_logs,
                                       copy_only)
    run_and_log_case_status(functor, "st_archive", caseroot=caseroot)

    logger.info("st_archive completed")

    # resubmit case if appropriate
    resubmit_cnt = self.get_value("RESUBMIT")
    logger.debug("resubmit_cnt {} resubmit {}".format(resubmit_cnt, resubmit))
    if resubmit_cnt > 0 and resubmit:
        logger.info(
            "resubmitting from st_archive, resubmit={:d}".format(resubmit_cnt))
        if self.get_value("MACH") == "mira":
            expect(os.path.isfile(".original_host"), "ERROR alcf host file not found")
            with open(".original_host", "r") as fd:
                # NOTE(review): fd.read() may include a trailing newline in
                # sshhost — confirm the .original_host file format.
                sshhost = fd.read()
            run_cmd("ssh cooleylogin1 ssh {} '{}/case.submit {} --resubmit' "\
                    .format(sshhost, caseroot, caseroot), verbose=True)
        else:
            self.submit(resubmit=True)

    return True
def _get_datenames(casename, rundir):
###############################################################################
    """
    Return the date objects for each coupler restart file in rundir.

    Note we are assuming that the coupler restart files exist and are
    consistent with other component datenames.

    Not doc-testable due to filesystem dependence
    """
    expect(isdir(rundir), 'Cannot open directory {} '.format(rundir))

    # Try the single-driver coupler restart naming first, then fall back
    # to the multi-driver naming (cpl_0001) if nothing matched.
    for cpl_glob in ('.cpl.r.*.nc', '.cpl_0001.r.*.nc'):
        files = sorted(glob.glob(os.path.join(rundir, casename + cpl_glob)))
        if files:
            break
    logger.debug(" cpl files : {} ".format(files))

    if not files:
        logger.warning(
            'Cannot find a {}.cpl*.r.*.nc file in directory {} '.format(
                casename, rundir))

    return [get_file_date(filename) for filename in files]
def _get_datenames(case):
###############################################################################
    """
    Collect the dates of all coupler restart files found in RUNDIR.

    Note we are assuming that the coupler restart files exist and are
    consistent with other component datenames.

    Not doc-testable due to filesystem dependence
    """
    casename = case.get_value("CASE")
    rundir = case.get_value("RUNDIR")
    expect(isdir(rundir), 'Cannot open directory {} '.format(rundir))

    # The MULTI_DRIVER option requires a more careful search for dates.
    # When True, each date has NINST cpl_####.r files.
    multidriver = case.get_value('MULTI_DRIVER')
    logger.debug("_get_datenames: multidriver = {} ".format(multidriver))

    if multidriver:
        glob_pattern = os.path.join(rundir, casename + '.cpl_0001.r.*.nc')
    else:
        glob_pattern = os.path.join(rundir, casename + '.cpl.r.*.nc')
    files = sorted(glob.glob(glob_pattern))
    logger.debug(" cpl files : {} ".format(files))

    if not files:
        expect(
            False,
            'Cannot find a {}.cpl*.r.*.nc file in directory {} '.format(
                casename, rundir))

    datenames = []
    for filename in files:
        datenames.append(get_file_date(filename))
    return datenames
def _archive_history_files(archive, archive_entry, compclass, compname, histfiles_savein_rundir, last_date, archive_file_fn, dout_s_root, casename, rundir):
###############################################################################
    """
    Perform short term archiving on history files in rundir.

    Files matching the component's history-file patterns are moved (via
    archive_file_fn) into <dout_s_root>/<compclass>/hist, except files
    listed in histfiles_savein_rundir, which are copied so they remain in
    rundir; files dated after last_date (when given) are left untouched.

    Not doc-testable due to case and file system dependence
    """
    # determine history archive directory (create if it does not exist)
    archive_histdir = os.path.join(dout_s_root, compclass, 'hist')
    if not os.path.exists(archive_histdir):
        os.makedirs(archive_histdir)
        logger.debug("created directory {}".format(archive_histdir))

    # the compname is drv but the files are named cpl
    if compname == 'drv':
        compname = 'cpl'
    # clm files may be named clm2; match either via an optional '2' in the regex
    if compname == 'clm':
        compname = r'clm2?'

    # determine ninst and ninst_string
    # archive history files - the only history files that kept in the
    # run directory are those that are needed for restarts
    for suffix in archive.get_hist_file_extensions(archive_entry):
        # MPAS components name history files by component, not by casename.
        if compname.find('mpas') == 0:
            newsuffix = compname + r'\d*'
        else:
            newsuffix = casename + r'\.' + compname + r'_?' + r'\d*'
        newsuffix += r'\.' + suffix
        # Only require a trailing '.' when the suffix is not anchored at
        # the end of the filename already.
        if not suffix.endswith('$'):
            newsuffix += r'\.'
        logger.debug("short term archiving suffix is {} ".format(newsuffix))
        pfile = re.compile(newsuffix)
        histfiles = [f for f in os.listdir(rundir) if pfile.search(f)]
        logger.debug("histfiles = {} ".format(histfiles))
        if histfiles:
            for histfile in histfiles:
                file_date = get_file_date(os.path.basename(histfile))
                # file_date is None for undated files; those are archived too.
                if last_date is None or file_date is None or file_date <= last_date:
                    srcfile = join(rundir, histfile)
                    expect(os.path.isfile(srcfile),
                           "history file {} does not exist ".format(srcfile))
                    destfile = join(archive_histdir, histfile)
                    if histfile in histfiles_savein_rundir:
                        # Needed for restarts: keep the original in rundir.
                        logger.info("copying {} to {} ".format(
                            srcfile, destfile))
                        safe_copy(srcfile, destfile)
                    else:
                        logger.info("moving {} to {} ".format(
                            srcfile, destfile))
                        archive_file_fn(srcfile, destfile)
def _archive_history_files(archive, archive_entry, compclass, compname, histfiles_savein_rundir, last_date, archive_file_fn, dout_s_root, casename, rundir):
###############################################################################
    """
    Perform short term archiving on history files in rundir.

    Matching history files are moved (via archive_file_fn) into
    <dout_s_root>/<compclass>/hist, except files listed in
    histfiles_savein_rundir, which are copied so the originals stay in
    rundir; files dated after last_date (when given) are skipped.

    Not doc-testable due to case and file system dependence
    """
    # determine history archive directory (create if it does not exist)
    archive_histdir = os.path.join(dout_s_root, compclass, 'hist')
    if not os.path.exists(archive_histdir):
        os.makedirs(archive_histdir)
        logger.debug("created directory {}".format(archive_histdir))

    # the compname is drv but the files are named cpl
    if compname == 'drv':
        compname = 'cpl'
    # clm files may be named clm2; match either via an optional '2' in the regex
    if compname == 'clm':
        compname = r'clm2?'

    # determine ninst and ninst_string
    # archive history files - the only history files that kept in the
    # run directory are those that are needed for restarts
    for suffix in archive.get_hist_file_extensions(archive_entry):
        # MPAS components and MALI name history files by component, not casename.
        if compname.find('mpas') == 0 or compname == 'mali':
            newsuffix = compname + r'\d*'
        else:
            newsuffix = casename + r'\.' + compname + r'_?' + r'\d*'
        newsuffix += r'\.' + suffix
        # Only require a trailing '.' when the suffix is not already
        # anchored at the end of the filename.
        if not suffix.endswith('$'):
            newsuffix += r'\.'
        logger.debug("short term archiving suffix is {} ".format(newsuffix))
        pfile = re.compile(newsuffix)
        histfiles = [f for f in os.listdir(rundir) if pfile.search(f)]
        logger.debug("histfiles = {} ".format(histfiles))
        if histfiles:
            for histfile in histfiles:
                file_date = get_file_date(os.path.basename(histfile))
                # file_date is None for undated files; those are archived too.
                if last_date is None or file_date is None or file_date <= last_date:
                    srcfile = join(rundir, histfile)
                    expect(os.path.isfile(srcfile),
                           "history file {} does not exist ".format(srcfile))
                    destfile = join(archive_histdir, histfile)
                    if histfile in histfiles_savein_rundir:
                        # Needed for restarts: keep the original in rundir.
                        logger.info("copying {} to {} ".format(srcfile, destfile))
                        safe_copy(srcfile, destfile)
                    else:
                        logger.info("moving {} to {} ".format(srcfile, destfile))
                        archive_file_fn(srcfile, destfile)
def case_st_archive(self, last_date_str=None, archive_incomplete_logs=True, copy_only=False, resubmit=True):
###############################################################################
    """
    Create archive object and perform short term archiving.

    Args:
        last_date_str: optional date string; only files dated at or before
            this date are archived (parsed via get_file_date).
        archive_incomplete_logs: forwarded to _archive_process.
        copy_only: forwarded to _archive_process (copy instead of move).
        resubmit: when True and RESUBMIT > 0, resubmit the case afterwards.

    Returns True on completion.
    """
    caseroot = self.get_value("CASEROOT")
    self.load_env(job="case.st_archive")
    if last_date_str is not None:
        try:
            last_date = get_file_date(last_date_str)
        except ValueError:
            expect(False, 'Could not parse the last date to archive')
    else:
        last_date = None

    dout_s_root = self.get_value('DOUT_S_ROOT')
    if dout_s_root is None or dout_s_root == 'UNSET':
        # FIX: corrected "achiver" typo in the error message.
        expect(False,
               'XML variable DOUT_S_ROOT is required for short-term archiver')
    if not isdir(dout_s_root):
        os.makedirs(dout_s_root)

    dout_s_save_interim = self.get_value('DOUT_S_SAVE_INTERIM_RESTART_FILES')
    if dout_s_save_interim == 'FALSE' or dout_s_save_interim == 'UNSET':
        rest_n = self.get_value('REST_N')
        stop_n = self.get_value('STOP_N')
        if rest_n < stop_n:
            # FIX: the two adjacent literals previously concatenated with no
            # separator, producing "...will be savedinterim restart files...".
            logger.warning('Restart files from end of run will be saved, '
                           'interim restart files will be deleted')

    logger.info("st_archive starting")

    archive = self.get_env('archive')
    functor = lambda: _archive_process(self, archive, last_date, archive_incomplete_logs,
                                       copy_only)
    run_and_log_case_status(functor, "st_archive", caseroot=caseroot)

    logger.info("st_archive completed")

    # resubmit case if appropriate
    resubmit_cnt = self.get_value("RESUBMIT")
    logger.debug("resubmit_cnt {} resubmit {}".format(resubmit_cnt, resubmit))
    if resubmit_cnt > 0 and resubmit:
        logger.info("resubmitting from st_archive, resubmit={:d}".format(resubmit_cnt))
        if self.get_value("MACH") == "mira":
            expect(os.path.isfile(".original_host"), "ERROR alcf host file not found")
            with open(".original_host", "r") as fd:
                # NOTE(review): fd.read() may include a trailing newline in
                # sshhost — confirm the .original_host file format.
                sshhost = fd.read()
            run_cmd("ssh cooleylogin1 ssh {} '{case}/case.submit {case} --resubmit' "\
                    .format(sshhost, case=caseroot), verbose=True)
        else:
            self.submit(resubmit=True)

    return True
def _get_datenames(casename, rundir):
###############################################################################
    """
    Collect the dates of all coupler restart files found in rundir.

    Note we are assuming that the coupler restart files exist and are
    consistent with other component datenames.

    Not doc-testable due to filesystem dependence
    """
    expect(isdir(rundir), 'Cannot open directory {} '.format(rundir))

    # Single-driver coupler restart naming takes precedence; only when no
    # such files exist do we look for the multi-driver (cpl_0001) naming.
    files = sorted(glob.glob(os.path.join(rundir, casename + '.cpl.r.*.nc')))
    if not files:
        fallback = os.path.join(rundir, casename + '.cpl_0001.r.*.nc')
        files = sorted(glob.glob(fallback))
    logger.debug(" cpl files : {} ".format(files))

    if not files:
        logger.warning('Cannot find a {}.cpl*.r.*.nc file in directory {} '.format(casename, rundir))

    return [get_file_date(fname) for fname in files]
def _archive_restarts_date_comp(case, archive, archive_entry, compclass, compname, datename, datename_is_last, last_date, archive_restdir, archive_file_fn, link_to_last_restart_files=False):
###############################################################################
    """
    Archive restart files for a single date and single component

    If link_to_last_restart_files is True, then make a symlink to the
    last set of restart files (i.e., the set with datename_is_last True);
    if False (the default), copy them. (This has no effect on the history
    files that are associated with these restart files.)

    Returns the list of history files that must remain in the run
    directory because they are needed for the last-date restart set.
    """
    rundir = case.get_value("RUNDIR")
    casename = case.get_value("CASE")
    datename_str = _datetime_str(datename)
    # The archive restart dir is only needed when this date's restarts
    # will actually be kept (last date, or interim saving is enabled).
    if datename_is_last or case.get_value('DOUT_S_SAVE_INTERIM_RESTART_FILES'):
        if not os.path.exists(archive_restdir):
            os.makedirs(archive_restdir)

    # archive the rpointer file(s) for this datename and all possible ninst_strings
    _archive_rpointer_files(casename, _get_ninst_info(case, compclass)[1], rundir,
                            case.get_value('DOUT_S_SAVE_INTERIM_RESTART_FILES'),
                            archive, archive_entry, archive_restdir, datename,
                            datename_is_last)

    # determine ninst and ninst_string
    ninst, ninst_strings = _get_ninst_info(case, compclass)

    # move all but latest restart files into the archive restart directory
    # copy latest restart files to archive restart directory
    histfiles_savein_rundir = []

    # determine function to use for last set of restart files
    if link_to_last_restart_files:
        last_restart_file_fn = symlink_force
        last_restart_file_fn_msg = "linking"
    else:
        last_restart_file_fn = shutil.copy
        last_restart_file_fn_msg = "copying"

    # the compname is drv but the files are named cpl
    if compname == 'drv':
        compname = 'cpl'

    # Escape the casename so regex metacharacters in it (e.g. '.') match
    # literally in the patterns built below.
    casename = re.escape(casename)

    # get file_extension suffixes
    for suffix in archive.get_rest_file_extensions(archive_entry):
        for i in range(ninst):
            restfiles = ""
            if compname.find("mpas") == 0:
                # MPAS restart names embed the date with '_' before the
                # time-of-day part instead of '-'.
                pattern = compname + r'\.' + suffix + r'\.' + '_'.join(datename_str.rsplit('-', 1))
                pfile = re.compile(pattern)
                restfiles = [f for f in os.listdir(rundir) if pfile.search(f)]
            else:
                # First narrow to this component's files, then filter by
                # suffix + date (and instance string when NINST > 1).
                pattern = r"{}.{}[\d_]*\..*".format(casename, compname)
                pfile = re.compile(pattern)
                files = [f for f in os.listdir(rundir) if pfile.search(f)]
                if ninst_strings:
                    pattern = ninst_strings[i] + r'\.' + suffix + r'\.' + datename_str
                else:
                    pattern = r'\.' + suffix + r'\.' + datename_str
                pfile = re.compile(pattern)
                restfiles = [f for f in files if pfile.search(f)]
            logger.debug("pattern is {} restfiles {}".format(pattern, restfiles))
            for restfile in restfiles:
                restfile = os.path.basename(restfile)

                file_date = get_file_date(restfile)
                if last_date is not None and file_date > last_date:
                    # Skip this file
                    continue

                if not os.path.exists(archive_restdir):
                    os.makedirs(archive_restdir)

                # obtain array of history files for restarts
                # need to do this before archiving restart files
                histfiles_for_restart = get_histfiles_for_restarts(rundir, archive,
                                                                   archive_entry, restfile)

                if datename_is_last and histfiles_for_restart:
                    for histfile in histfiles_for_restart:
                        if histfile not in histfiles_savein_rundir:
                            histfiles_savein_rundir.append(histfile)

                # archive restart files and all history files that are needed for restart
                # Note that the latest file should be copied and not moved
                if datename_is_last:
                    srcfile = os.path.join(rundir, restfile)
                    destfile = os.path.join(archive_restdir, restfile)
                    last_restart_file_fn(srcfile, destfile)
                    logger.debug("{} {} \n {} to \n {}".format(
                        "datename_is_last", last_restart_file_fn_msg, srcfile, destfile))
                    for histfile in histfiles_for_restart:
                        srcfile = os.path.join(rundir, histfile)
                        destfile = os.path.join(archive_restdir, histfile)
                        expect(os.path.isfile(srcfile),
                               "history restart file {} for last date does not exist ".format(srcfile))
                        shutil.copy(srcfile, destfile)
                        logger.debug("datename_is_last + histfiles_for_restart copying \n {} to \n {}".format(srcfile, destfile))
                else:
                    # Only archive intermediate restarts if requested - otherwise remove them
                    if case.get_value('DOUT_S_SAVE_INTERIM_RESTART_FILES'):
                        srcfile = os.path.join(rundir, restfile)
                        destfile = os.path.join(archive_restdir, restfile)
                        expect(os.path.isfile(srcfile),
                               "restart file {} does not exist ".format(srcfile))
                        archive_file_fn(srcfile, destfile)
                        logger.debug("_archive_restarts_date_comp; moving \n {} to \n {}".format(srcfile, destfile))

                        # need to copy the history files needed for interim restarts - since
                        # have not archived all of the history files yet
                        for histfile in histfiles_for_restart:
                            srcfile = os.path.join(rundir, histfile)
                            destfile = os.path.join(archive_restdir, histfile)
                            expect(os.path.isfile(srcfile),
                                   "hist file {} does not exist ".format(srcfile))
                            shutil.copy(srcfile, destfile)
                            logger.debug("interim_restarts: copying \n {} to \n {}".format(srcfile, destfile))
                    else:
                        srcfile = os.path.join(rundir, restfile)
                        logger.debug("removing interim restart file {}".format(srcfile))
                        if (os.path.isfile(srcfile)):
                            try:
                                os.remove(srcfile)
                            except OSError:
                                # Best-effort removal: log and continue rather than abort.
                                logger.warning("unable to remove interim restart file {}".format(srcfile))
                        else:
                            logger.warning("interim restart file {} does not exist".format(srcfile))

    return histfiles_savein_rundir
def _archive_history_files(case, archive, archive_entry, compclass, compname, histfiles_savein_rundir, last_date, archive_file_fn):
###############################################################################
    """
    perform short term archiving on history files in rundir

    Matching history files are moved (via archive_file_fn) into
    DOUT_S_ROOT/<compclass>/hist, except those in histfiles_savein_rundir,
    which are copied so the originals remain in the run directory; files
    dated after last_date (when given) are left untouched.

    Not doc-testable due to case and file system dependence
    """
    # determine history archive directory (create if it does not exist)
    dout_s_root = case.get_value("DOUT_S_ROOT")
    # Escape the casename so regex metacharacters in it match literally.
    casename = re.escape(case.get_value("CASE"))
    archive_histdir = os.path.join(dout_s_root, compclass, 'hist')
    if not os.path.exists(archive_histdir):
        os.makedirs(archive_histdir)
        logger.debug("created directory {}".format(archive_histdir))

    # the compname is drv but the files are named cpl
    if compname == 'drv':
        compname = 'cpl'

    # determine ninst and ninst_string
    ninst, ninst_string = _get_ninst_info(case, compclass)

    # archive history files - the only history files that kept in the
    # run directory are those that are needed for restarts
    rundir = case.get_value("RUNDIR")
    for suffix in archive.get_hist_file_extensions(archive_entry):
        for i in range(ninst):
            if ninst_string:
                if compname.find('mpas') == 0:
                    # Not correct, but MPAS' multi-instance name format is unknown.
                    newsuffix = compname + r'\d*'
                else:
                    newsuffix = casename + r'\.' + compname + r'\d*' + ninst_string[i]
            else:
                if compname.find('mpas') == 0:
                    newsuffix = compname + r'\d*'
                else:
                    newsuffix = casename + r'\.' + compname + r'\d*'
            newsuffix += r'\.' + suffix
            # Only require a trailing '.' when the suffix is not already
            # anchored at the end of the filename.
            if not suffix.endswith('$'):
                newsuffix += r'\.'
            logger.debug("short term archiving suffix is {} ".format(newsuffix))
            pfile = re.compile(newsuffix)
            histfiles = [f for f in os.listdir(rundir) if pfile.search(f)]
            logger.debug("histfiles = {} ".format(histfiles))
            if histfiles:
                for histfile in histfiles:
                    file_date = get_file_date(os.path.basename(histfile))
                    # file_date is None for undated files; archive those too.
                    if last_date is None or file_date is None or file_date <= last_date:
                        srcfile = join(rundir, histfile)
                        expect(os.path.isfile(srcfile),
                               "history file {} does not exist ".format(srcfile))
                        destfile = join(archive_histdir, histfile)
                        if histfile in histfiles_savein_rundir:
                            # Needed for restarts: keep the original in rundir.
                            logger.debug("histfiles_savein_rundir; copying \n {} to \n {} ".format(srcfile, destfile))
                            shutil.copy(srcfile, destfile)
                        else:
                            logger.debug("_archive_history_files; moving \n {} to \n {} ".format(srcfile, destfile))
                            archive_file_fn(srcfile, destfile)
datename_str.rsplit('-', 1)) pfile = re.compile(pattern) restfiles = [f for f in os.listdir(rundir) if pfile.search(f)] else: pattern = r"^{}\.{}[\d_]*\.".format(casename, compname) pfile = re.compile(pattern) files = [f for f in os.listdir(rundir) if pfile.search(f)] pattern = r'_?' + r'\d*' + r'\.' + suffix + r'\.' + r'[^\.]*' + r'\.?' + datename_str pfile = re.compile(pattern) restfiles = [f for f in files if pfile.search(f)] logger.debug("pattern is {} restfiles {}".format( pattern, restfiles)) for restfile in restfiles: restfile = os.path.basename(restfile) file_date = get_file_date(restfile) if last_date is not None and file_date > last_date: # Skip this file continue if not os.path.exists(archive_restdir): os.makedirs(archive_restdir) # obtain array of history files for restarts # need to do this before archiving restart files histfiles_for_restart = get_histfiles_for_restarts( rundir, archive, archive_entry, restfile, testonly=testonly) if datename_is_last and histfiles_for_restart: for histfile in histfiles_for_restart: if histfile not in histfiles_savein_rundir:
else: newsuffix = casename + r'\.' + compname + r'\d*' newsuffix += r'\.' + suffix if not suffix.endswith('$'): newsuffix += r'\.' logger.debug( "short term archiving suffix is {} ".format(newsuffix)) pfile = re.compile(newsuffix) histfiles = [f for f in os.listdir(rundir) if pfile.search(f)] logger.debug("histfiles = {} ".format(histfiles)) if histfiles: for histfile in histfiles: file_date = get_file_date(os.path.basename(histfile)) if last_date is None or file_date is None or file_date <= last_date: srcfile = join(rundir, histfile) expect( os.path.isfile(srcfile), "history file {} does not exist ".format(srcfile)) destfile = join(archive_histdir, histfile) if histfile in histfiles_savein_rundir: logger.debug( "histfiles_savein_rundir; copying \n {} to \n {} " .format(srcfile, destfile)) shutil.copy(srcfile, destfile) else: logger.debug( "_archive_history_files; moving \n {} to \n {} " .format(srcfile, destfile))
def _archive_history_files(archive, compclass, compname, histfiles_savein_rundir, last_date, archive_file_fn, dout_s_root, casename, rundir):
###############################################################################
    """
    perform short term archiving on history files in rundir

    NEMO gets special handling: its mesh/rebuild files go to a 'rebuild'
    subdirectory and its <case>_[0-9][mdy]_* history files are archived by
    filename pattern before the generic history-file pass below.

    Not doc-testable due to case and file system dependence
    """
    # determine history archive directory (create if it does not exist)
    archive_histdir = os.path.join(dout_s_root, compclass, 'hist')
    if not os.path.exists(archive_histdir):
        os.makedirs(archive_histdir)
        logger.debug("created directory {}".format(archive_histdir))

    # the compname is drv but the files are named cpl
    if compname == 'drv':
        compname = 'cpl'

    if compname == 'nemo':
        # NEMO rebuild files (mesh_mask_*) live in their own archive subdir.
        archive_rblddir = os.path.join(dout_s_root, compclass, 'rebuild')
        if not os.path.exists(archive_rblddir):
            os.makedirs(archive_rblddir)
            logger.debug("created directory {}".format(archive_rblddir))

        sfxrbld = r'mesh_mask_' + r'[0-9]*'
        pfile = re.compile(sfxrbld)
        rbldfiles = [f for f in os.listdir(rundir) if pfile.search(f)]
        logger.debug("rbldfiles = {} ".format(rbldfiles))

        if rbldfiles:
            for rbldfile in rbldfiles:
                srcfile = join(rundir, rbldfile)
                destfile = join(archive_rblddir, rbldfile)
                logger.info("moving {} to {} ".format(srcfile, destfile))
                archive_file_fn(srcfile, destfile)

        # NEMO history files are named <case>_<n><m|d|y>_<digits>...
        sfxhst = casename + r'_[0-9][mdy]_' + r'[0-9]*'
        pfile = re.compile(sfxhst)
        hstfiles = [f for f in os.listdir(rundir) if pfile.search(f)]
        logger.debug("hstfiles = {} ".format(hstfiles))

        if hstfiles:
            for hstfile in hstfiles:
                srcfile = join(rundir, hstfile)
                destfile = join(archive_histdir, hstfile)
                logger.info("moving {} to {} ".format(srcfile, destfile))
                archive_file_fn(srcfile, destfile)

    # determine ninst and ninst_string
    # archive history files - the only history files that kept in the
    # run directory are those that are needed for restarts
    histfiles = archive.get_all_hist_files(casename, compname, rundir)
    if histfiles:
        for histfile in histfiles:
            file_date = get_file_date(os.path.basename(histfile))
            # file_date is None for undated files; those are archived too.
            if last_date is None or file_date is None or file_date <= last_date:
                srcfile = join(rundir, histfile)
                expect(os.path.isfile(srcfile),
                       "history file {} does not exist ".format(srcfile))
                destfile = join(archive_histdir, histfile)
                if histfile in histfiles_savein_rundir:
                    # Needed for restarts: keep the original in rundir.
                    logger.info("copying {} to {} ".format(srcfile, destfile))
                    safe_copy(srcfile, destfile)
                else:
                    logger.info("moving {} to {} ".format(srcfile, destfile))
                    archive_file_fn(srcfile, destfile)
elif compname == 'nemo': pattern = r'_*_' + suffix + r'[0-9]*' pfile = re.compile(pattern) restfiles = [f for f in os.listdir(rundir) if pfile.search(f)] else: pattern = r"^{}\.{}[\d_]*\.".format(casename, compname) pfile = re.compile(pattern) files = [f for f in os.listdir(rundir) if pfile.search(f)] pattern = r'_?' + r'\d*' + r'\.' + suffix + r'\.' + r'[^\.]*' + r'\.?' + datename_str pfile = re.compile(pattern) restfiles = [f for f in files if pfile.search(f)] logger.debug("pattern is {} restfiles {}".format(pattern, restfiles)) for rfile in restfiles: rfile = os.path.basename(rfile) file_date = get_file_date(rfile) if last_date is not None and file_date > last_date: # Skip this file continue if not os.path.exists(archive_restdir): os.makedirs(archive_restdir) # obtain array of history files for restarts # need to do this before archiving restart files histfiles_for_restart = get_histfiles_for_restarts(rundir, archive, archive_entry, rfile, testonly=testonly) if datename_is_last and histfiles_for_restart: for histfile in histfiles_for_restart:
def case_st_archive(
    self,
    last_date_str=None,
    archive_incomplete_logs=True,
    copy_only=False,
    resubmit=True,
):
    ###############################################################################
    """
    Create archive object and perform short term archiving.

    Args:
        last_date_str: optional date string; only files dated at or before
            this date are archived (parsed via get_file_date).
        archive_incomplete_logs: forwarded to _archive_process.
        copy_only: forwarded to _archive_process (copy instead of move).
        resubmit: when True, RESUBMIT > 0 and EXTERNAL_WORKFLOW is unset,
            resubmit the case afterwards.

    Returns True on completion.
    """
    logger.debug("resubmit {}".format(resubmit))
    caseroot = self.get_value("CASEROOT")
    self.load_env(job="case.st_archive")
    if last_date_str is not None:
        try:
            last_date = get_file_date(last_date_str)
        except ValueError:
            expect(False, "Could not parse the last date to archive")
    else:
        last_date = None

    dout_s_root = self.get_value("DOUT_S_ROOT")
    if dout_s_root is None or dout_s_root == "UNSET":
        # FIX: corrected "achiver" typo in the error message.
        expect(False, "XML variable DOUT_S_ROOT is required for short-term archiver")

    if not isdir(dout_s_root):
        os.makedirs(dout_s_root)

    dout_s_save_interim = self.get_value("DOUT_S_SAVE_INTERIM_RESTART_FILES")
    if dout_s_save_interim == "FALSE" or dout_s_save_interim == "UNSET":
        rest_n = self.get_value("REST_N")
        stop_n = self.get_value("STOP_N")
        if rest_n < stop_n:
            # FIX: the two adjacent literals previously concatenated with no
            # separator, producing "...will be savedinterim restart files...".
            logger.warning("Restart files from end of run will be saved, "
                           "interim restart files will be deleted")

    logger.info("st_archive starting")

    # Tag case-status messages with the batch job id when running under a
    # batch system, so st_archive entries can be matched to jobs.
    is_batch = self.get_value("BATCH_SYSTEM")
    msg_func = None

    if is_batch:
        jobid = batch_jobid()
        msg_func = lambda *args: jobid if jobid is not None else ""

    archive = self.get_env("archive")
    functor = lambda: _archive_process(self, archive, last_date,
                                       archive_incomplete_logs, copy_only)

    run_and_log_case_status(
        functor,
        "st_archive",
        custom_starting_msg_functor=msg_func,
        custom_success_msg_functor=msg_func,
        caseroot=caseroot,
        is_batch=is_batch,
    )

    logger.info("st_archive completed")

    # resubmit case if appropriate
    if not self.get_value("EXTERNAL_WORKFLOW") and resubmit:
        resubmit_cnt = self.get_value("RESUBMIT")
        logger.debug("resubmit_cnt {} resubmit {}".format(
            resubmit_cnt, resubmit))
        if resubmit_cnt > 0:
            logger.info("resubmitting from st_archive, resubmit={:d}".format(
                resubmit_cnt))
            if self.get_value("MACH") == "mira":
                expect(os.path.isfile(".original_host"),
                       "ERROR alcf host file not found")
                with open(".original_host", "r") as fd:
                    # NOTE(review): fd.read() may include a trailing newline
                    # in sshhost — confirm the .original_host file format.
                    sshhost = fd.read()
                run_cmd(
                    "ssh cooleylogin1 ssh {} '{case}/case.submit {case} --resubmit' "
                    .format(sshhost, case=caseroot),
                    verbose=True,
                )
            else:
                self.submit(resubmit=True)

    return True