def parse_args(descr=__doc__):
    """Parse and validate command line arguments.

    Parameters
    ----------
    descr : string
        Description displayed in --help output (defaults to the module
        docstring).

    Returns
    -------
    args : argparse.Namespace
        Parsed arguments. `args.dir` is normalized to either an absolute
        directory path (when --dir is given) or a single-element list
        containing the absolute path of --file, which is the form
        CleanupRecoFiles accepts.

    Raises
    ------
    ValueError
        If both or neither of --dir / --file are specified, or if the given
        path does not exist.
    """
    parser = ArgumentParser(
        description=descr, formatter_class=ArgumentDefaultsHelpFormatter
    )
    parser.add_argument(
        '-d', '--dir', default=None,
        help='''Directory in which to find the files for cleaning.'''
    )
    parser.add_argument(
        '-f', '--file', default=None,
        help='''Single file to clean.'''
    )
    parser.add_argument(
        '--remove-empty', action='store_true',
        help='''Remove empty (0-length) files prior to cleaning up locks.'''
    )
    parser.add_argument(
        '--deep-clean', action='store_true',
        help='''Look inside I3 files and remove or rename if the recos are
        missing or incomplete as compared to those indicated by the filename.
        Also, merge files that come from the same event.'''
    )
    args = parser.parse_args()

    # Validate with explicit exceptions rather than bare `assert`: asserts
    # are stripped under `python -O`, and the original crashed with an
    # unrelated TypeError when neither option was supplied.
    if args.dir and args.file:
        raise ValueError('Specify either --dir or --file, not both.')
    if not args.dir and not args.file:
        raise ValueError('One of --dir or --file must be specified.')

    if args.dir:
        args.dir = abspath(expand(args.dir))
        if not isdir(args.dir):
            raise ValueError('--dir "%s" is not a directory.' % args.dir)
    else:
        args.file = abspath(expand(args.file))
        if not isfile(args.file):
            raise ValueError('--file "%s" is not a file.' % args.file)
        # CleanupRecoFiles takes a list of strings to be list of filenames, so
        # just replace args.dir with list containing this one filename
        args.dir = [args.file]

    return args
def get_recos(frame):
    """Get reconstruction #'s from an I3 file frame.

    Looks for all recos specified in `RECOS`, where the reconstruction's name
    appended with `FIT_FIELD_SUFFIX` (e.g. "_FitParams"; defined in
    repeated_reco.py) is expected to be in the frame for that reconstruction
    to be deemed present.

    Parameters
    ----------
    frame : I3Frame or string
        Frame to inspect; if a string, it is taken as the path to an I3 file
        whose first P frame is inspected.

    Returns
    -------
    recos : list of integers
        Reconstruction numbers found in the frame, where numbers are
        references to the `RECOS` constant defined in `repeated_reco.py`.
        Empty list on any failure to open/read a file.

    """
    from icecube import dataio, multinest_icetray

    if isinstance(frame, basestring):
        filepath = expand(frame)
        # Catch only Exception (not bare `except:`) so KeyboardInterrupt and
        # SystemExit propagate
        try:
            i3file = dataio.I3File(filepath)
        except Exception:
            exc_str = ''.join(format_exception(*sys.exc_info()))
            wstdout('ERROR! Could not open file "%s"\n%s\n'
                    % (filepath, exc_str))
            return []
        # Close the file whether or not a P frame is found; the original
        # closed it only on success, leaking the handle on failure.
        try:
            frame = i3file.pop_physics()
        except Exception:
            exc_str = ''.join(format_exception(*sys.exc_info()))
            wstdout('ERROR! Could not find P frame in file "%s"\n%s\n'
                    % (filepath, exc_str))
            return []
        finally:
            i3file.close()

    keys = frame.keys()
    recos = []
    for reco_num, reco_info in enumerate(RECOS):
        field_name = reco_info['name'] + FIT_FIELD_SUFFIX
        # NOTE: a `has_reached_time_limit` check was deliberately disabled
        # here; presence of the fit field alone marks the reco as done.
        if field_name in keys:
            recos.append(reco_num)

    return sorted(recos)
def __init__(self, dirpath):
    """Initialize from a directory path or an explicit collection of files.

    Parameters
    ----------
    dirpath : string or iterable of strings
        If a string, it names a directory whose listing is loaded via
        `refresh_listing`. Otherwise, each item must be the path of an
        existing file.

    Raises
    ------
    ValueError
        If `dirpath` is neither a string nor an iterable, or any item of the
        iterable is not a string naming an existing file.
    """
    self.dirpath = dirpath
    if isinstance(dirpath, basestring):
        # Single directory: normalize the path, then scan its contents
        self.dirpath = abspath(expand(dirpath))
        assert isdir(self.dirpath)
        self.refresh_listing()
    elif isinstance(dirpath, Iterable):
        # Explicit file list: validate each entry as it is recorded
        self._allfilepaths = []
        for entry in dirpath:
            if not isinstance(entry, basestring):
                raise ValueError('each item must be a string')
            if not isfile(entry):
                raise ValueError('path "%s" is not a file!' % entry)
            self._allfilepaths.append(entry)
    else:
        raise ValueError('`dirpath` must be a string or iterable thereof')

    # Bookkeeping: record of what happened to each file during cleanup
    self.removed = []
    self.renamed = []
    self.kept = []
    self.failed_to_remove = []
    self.failed_to_rename = []
def main():
    """Main: iterate over input files, acquire locks, and schedule the recos
    that fit within the remaining job time."""
    start_time_sec = time.time()
    args = parse_args()

    def _sigint_handler(signal, frame):  # pylint: disable=unused-argument, redefined-outer-name
        wstderr('=' * 79 + '\n')
        wstderr('*** CAUGHT CTL-C (sigint) *** ... attempting to cleanup!\n')
        wstderr('=' * 79 + '\n')
        raise KeyboardInterrupt
    # NOTE(review): _sigint_handler is defined but never installed via
    # signal.signal(); confirm whether registration was intended.

    # Import IceCube things now
    from I3Tray import I3Tray  # pylint: disable=import-error
    from icecube import dataclasses, dataio, icetray, multinest_icetray  # pylint: disable=unused-variable, import-error
    from cluster import get_spline_tables

    lock_info = get_process_info()

    # Report run configuration and lock metadata, one "key = value" per line
    wstdout('=' * 79 + '\n')
    for d in [vars(args), lock_info]:
        wstdout('\n')
        wstdout('\n'.join([(('%20s' % k) + ' = %s' % d[k])
                           for k in sorted(d.keys())]))
    wstdout('\n' * 2)

    file_lister = FileLister(infile=args.infile, indir=args.indir)
    event_counter = EventCounter(srt_pulse_name=args.srt_pulse_name,
                                 skip=args.skip, n_events=args.n_events)

    expiration = time.time() + args.seconds_remaining
    expiration_timestamp = timestamp(at=expiration, utc=True)

    while True:
        infile_path = file_lister.get_next_file()
        if infile_path is None:
            wstdout('> No more files that can be processed. Quitting.\n')
            break

        # NOTE: cannot run on a file that has _all_ recos already run, since
        # output file cannot be same as input file (which it will have same
        # name, since the name is derived from recos run / etc.)
        already_run = recos_from_path(infile_path)

        # NOTE: now skipping a reco is determined ONLY by the "If" kwarg, and
        # not by the filename at all (swap the comment on the next line for
        # the line below to change behavior back)
        #recos_not_run_yet = sorted(set(args.requested) - set(already_run))
        recos_not_run_yet = sorted(set(args.requested))
        if not recos_not_run_yet:
            wstdout('> Nothing more to be done on file. Moving on. ("%s")\n'
                    % infile_path)
            continue

        # See if file still exists
        if not isfile(infile_path):
            wstdout('> File no longer exists. Moving on. ("%s")\n'
                    % infile_path)
            continue

        # Skip if empty input files
        if getsize(infile_path) == 0:
            wstdout('> Input file is 0-length. Moving on. ("%s")\n'
                    % infile_path)
            continue

        # NOTE: commenting out the following and forcing an extremely long
        # timeout to allow all recos to run (of which many won't have to,
        # because they've already been run). Uncomment the following three
        # lines and comment out the "time_remaining =" line below to change
        # the behavior back when most or all recos have to be run
        #time_remaining = np.ceil(
        #    args.seconds_remaining - (time.time() - start_time_sec)
        #)
        time_remaining = 3600 * 24 * 10000

        # See if any reco at all fits in the remaining time
        if time_remaining <= MIN_RECO_TIME:
            wstdout('Not enough time to run *any* reco. Quitting.\n')
            break

        # See if any of the recos needing to be run on *this* file fit in the
        # remaining time; register all `reco_num`s that can be run
        recos_to_run = []
        after_proc_time_remaining = time_remaining
        for reco_num in recos_not_run_yet:
            time_limit = RECOS[reco_num]['time_limit']
            if time_limit > after_proc_time_remaining:
                continue
            recos_to_run.append(reco_num)
            after_proc_time_remaining -= time_limit
        time_to_run_processing = time_remaining - after_proc_time_remaining

        # Give the lock an extra minute beyond the strict time to run
        expiration = time.time() + time_to_run_processing + 60
        expiration_timestamp = timestamp(at=expiration, utc=True)

        if not recos_to_run:
            wstdout('Not enough time to run any remaining reco on file.'
                    ' Moving on. ("%s")\n' % infile_path)
            continue

        infile_lock_f, outfile_lock_f = None, None
        infile_lock_path = infile_path + LOCK_SUFFIX
        outfile_lock_path = None

        allrecos = set(recos_to_run).union(already_run)
        outfile_name = basename(
            path_from_recos(orig_path=infile_path, recos=allrecos))
        outfile_path = abspath(expand(join(args.outdir, outfile_name)))
        outfile_lock_path = outfile_path + LOCK_SUFFIX

        if outfile_name == infile_path or outfile_path == infile_path:
            wstdout('Outfile is same as infile, which will lead to removal of'
                    ' infile. Path = "%s" ; Moving on to next input file.\n'
                    % infile_path)
            continue

        lock_info['acquired_at'] = timestamp(utc=True)
        lock_info['expires_at'] = expiration_timestamp
        lock_info['infile'] = infile_path
        lock_info['outfile'] = outfile_path

        if isfile(outfile_path):
            wstdout('> Outfile path exists; will overwrite if both infile and'
                    ' outfile locks can be obtained! ...\n'
                    '> "%s"\n' % outfile_path)

        # NOTE:
        # Create lockfiles (if they don't exist) for each of the infile and
        # outfile, and try to acquire exclusive locks on these before
        # working with either the infile or outfile.
        #
        # Also: write info to the lockfiles to know when it's okay to clean
        # each up manually. Note that the `flock` will be removed by the OS
        # as soon as the lock file is closed or when this process dies.
        lock_info['type'] = 'infile_lock'
        try:
            if args.use_locks:
                infile_lock_f = acquire_lock(infile_lock_path, lock_info)
        except IOError:
            wstdout('> infile lock failed to be obtained.'
                    ' "%s"\n' % infile_lock_path)
            infile_lock_f = None
            continue

        lock_info['type'] = 'outfile_lock'
        try:
            if args.use_locks:
                outfile_lock_f = acquire_lock(outfile_lock_path, lock_info)
        except IOError:
            wstdout('ERROR: outfile lock failed to be obtained.'
                    ' Cleaning up infile lock and moving on.\n'
                    ' "%s" (infile lock)\n'
                    ' "%s" (outfile lock)\n'
                    % (infile_lock_path, outfile_lock_path))
            cleanup_lock_f(infile_lock_f)
            infile_lock_f = None
            continue

        # Ensure the output path is clear before processing. errno 2
        # (ENOENT, "No such file or directory") is fine since the point of
        # `remove` is to guarantee the path doesn't exist.
        try:
            remove(outfile_path)
        except OSError as err:  # bugfix: `except OSError, err` is py2-only
            if err.errno != 2:
                wstdout('> ERROR: obtained locks but outfile path exists and'
                        ' cannot be removed. Cleaning up locks and moving'
                        ' on.\n'
                        '> "%s" (outfile path)\n'
                        '> "%s" (infile_lock_path)\n'
                        '> "%s" (outfile_lock_path)\n'
                        % (outfile_path, infile_lock_path, outfile_lock_path))
                cleanup_lock_f(infile_lock_f)
                infile_lock_f = None
                cleanup_lock_f(outfile_lock_f)
                outfile_lock_f = None
                continue
        except Exception:
            # Unexpected failure: release both locks, then re-raise
            cleanup_lock_f(infile_lock_f)
            infile_lock_f = None
            cleanup_lock_f(outfile_lock_f)
            outfile_lock_f = None
            raise
def parse_args(descr=__doc__):
    """Parse and validate command line arguments.

    Returns
    -------
    args : argparse.Namespace
        Parsed arguments, augmented with derived attributes: `requested`
        (list of reco numbers), `srt_pulse_name`, `geometry`, and
        `seconds_remaining`.

    Raises
    ------
    IOError
        If a specified input path, gcd file, or output dir is invalid.
    ValueError
        On contradictory or missing option combinations.
    """
    parser = ArgumentParser(description=descr,
                            formatter_class=ArgumentDefaultsHelpFormatter)
    parser.add_argument(
        '--infile', default=None,
        help='''Path to the input file. If specified, do not specify
        --indir'''
    )
    parser.add_argument(
        '--indir', default=None,
        help='''Path to the input directory, from which all I3 files will be
        processed. If specified, do not specify --infile''')
    parser.add_argument(
        '--outdir', required=True,
        help='''Output directory; must not be same as indir (or infile's
        directory), as the source file may be corrupted or removed if an
        error is encountered.''',
    )
    parser.add_argument(
        '--gcd', required=True,
        help='Path to GCD file',
    )
    parser.add_argument(
        '--skip', type=int, default=0,
        help='''Number of events to skip. Note that an "event" is defined as
        a frame containing the SRT_PULSE_NAME.''',
    )
    parser.add_argument(
        '--n-events', type=int, default=0,
        help='''Total number of "events" to process (n-events <= 0 processes
        all events in the file(s) starting from from --skip). Note that an
        event is defined as a frame containing the SRT_PULSE_NAME.''',
    )
    parser.add_argument(
        '--recos', type=str.lower, default='all',
        help='''Reco numbers to run. Specify "all" or a human-readable list,
        like "0-2,5" (which would perform steps 0, 1, 2, and 5). Note that
        indexing begins at 0.''',
    )
    parser.add_argument(
        '--detector', required=True, type=str.lower,
        choices=['deepcore', 'pingu'],
        help='''Detector for which the MC or data was produced (this selects
        an appropriate SRT_PULSE_NAME)''')
    parser.add_argument(
        '--minutes-remaining', type=float, default=np.inf,
        help='''Minutes remaining in a job to run a reco; only those
        reconstructions with time limits less than this will run. Specify
        <= 0 for no limit.''')
    parser.add_argument(
        '--hours-remaining', type=float, default=np.inf,
        help='''Hours remaining in a job to run a reco; only those
        reconstructions with time limits less than this will run. Specify
        <= 0 for no limit.''')
    parser.add_argument(
        '--use-locks', action='store_true',
        help='''EXPERIMENTAL (and flaky): Use file locking to protect files
        from being processed by multiple separate processes.''')
    args = parser.parse_args()

    # Explicit exceptions for validation (asserts are stripped under `-O`,
    # and the original mixed `raise IOError` with bare asserts)
    if args.skip < 0:
        raise ValueError('--skip must be >= 0; got %d' % args.skip)

    if args.recos == 'all':
        args.requested = list(range(len(RECOS)))
    else:
        args.requested = hrlist2list(args.recos)

    # Exactly one of --infile / --indir must be specified
    num_inspecs = 0
    if args.infile is not None:
        args.infile = abspath(expand(args.infile))
        if not isfile(args.infile):
            raise IOError('`infile` "%s" is not a file.' % args.infile)
        num_inspecs += 1
    if args.indir is not None:
        args.indir = abspath(expand(args.indir))
        if not isdir(args.indir):
            raise IOError('`indir` "%s" is not a directory.' % args.indir)
        num_inspecs += 1
    if num_inspecs != 1:
        raise ValueError(
            'Either --infile or --indir must be specified but not both.')

    if args.infile is not None:
        indir = abspath(dirname(expand(args.infile)))
    else:
        indir = abspath(expand(args.indir))

    args.outdir = abspath(expand(args.outdir))
    if args.outdir == indir:
        raise ValueError(
            'Outdir cannot be same as indir (or if infile is specified,'
            ' directory in which infile resides')
    mkdir(args.outdir, warn=False)
    if not isdir(args.outdir):
        raise IOError('Could not create outdir "%s".' % args.outdir)

    args.gcd = expand(args.gcd)
    if not isfile(args.gcd):
        raise IOError('`gcd` "%s" is not a file.' % args.gcd)

    if args.detector == 'pingu':
        args.srt_pulse_name = 'newSRT_TW_Cleaned_WavedeformPulses'
        args.geometry = 'pingu'
    elif args.detector == 'deepcore':
        args.srt_pulse_name = 'SRTTWOfflinePulsesDC'
        args.geometry = 'deepcore'

    # Normalize the two time-remaining specs into integer seconds, capped at
    # about one year; <= 0 or unspecified means "no limit" (inf before cap)
    if np.isinf(args.minutes_remaining):
        if np.isinf(args.hours_remaining):
            args.seconds_remaining = np.inf
        else:
            args.seconds_remaining = args.hours_remaining * 3600
    else:
        if not np.isinf(args.hours_remaining):
            # If both are given they must agree exactly
            if args.minutes_remaining != args.hours_remaining * 60:
                raise ValueError('--minutes-remaining and --hours-remaining'
                                 ' disagree.')
        args.seconds_remaining = args.minutes_remaining * 60
    if args.seconds_remaining <= 0:
        args.seconds_remaining = np.inf
    args.seconds_remaining = int(
        np.ceil(np.clip(args.seconds_remaining, a_min=0, a_max=31556926)))

    return args
def merge_and_rename(files):
    """Merge and/or rename all I3 files passed.

    Parameters
    ----------
    files : sequence or iterable
        Paths of I3 files to merge (or a single-element sequence simply to
        rename to reflect the recos it contains).

    Returns
    -------
    None if failure, or -- if success -- string containing path of generated
    file

    """
    from icecube import dataio, icetray

    if len(files) == 1:
        filepath = files[0]
        recos_in_file = get_recos(filepath)
        new_path = pathFromRecos(orig_path=filepath, recos=recos_in_file)
        if new_path == filepath:
            # Name already reflects contents; nothing to do. (Bugfix: was
            # `return [filepath]`, a list, contradicting the documented
            # string return type.)
            return filepath
        if isfile(new_path):
            # Bugfix: format args were swapped -- it is the *target* path
            # that exists and gets overwritten
            wstdout('Warning! "%s" exists and will be overwritten when'
                    ' current file "%s" is renamed.\n' % (new_path, filepath))
        try:
            rename(filepath, new_path)
        except Exception:
            exc_str = ''.join(format_exception(*sys.exc_info()))
            wstdout('ERROR! Could not move file from "%s" to "%s".\n%s\n'
                    % (filepath, new_path, exc_str))
            return None
        # Bugfix: the generated file now lives at `new_path`; returning the
        # old `filepath` (which no longer exists) misled callers
        return new_path

    # TODO: make sure recos are the same for all frames...
    frames = merge(files)

    # Derive the output path from the recos of the (last) physics frame
    new_path = None
    recos = []
    for frame in frames:
        if frame.Stop == icetray.I3Frame.Physics:
            recos = get_recos(frame)
            new_path = pathFromRecos(orig_path=expand(files[0]), recos=recos)
    if new_path is None:
        # Bugfix: previously a NameError if no physics frame was present
        wstdout('ERROR! No physics frame found in files to merge: %s\n'
                % (list(files),))
        return None

    if isfile(new_path):
        recos_in_existing_file = get_recos(new_path)
        if set(recos_in_existing_file) != set(recos):
            # Bugfix: message previously referenced `filepath`, which is
            # undefined in this branch (NameError)
            wstdout('Warning! "%s" exists, has different recos than the'
                    ' merged result of %s, and will be overwritten.\n'
                    % (new_path, list(files)))
        try:
            remove(new_path)
        except Exception:
            exc_str = ''.join(format_exception(*sys.exc_info()))
            wstdout('ERROR! Could not remove file "%s".\n%s\n'
                    % (new_path, exc_str))
            return None

    # Write all merged frames out; always close the file handle
    i3file = None
    try:
        i3file = dataio.I3File(new_path, 'w')
        for out_frame in frames:  # renamed: don't shadow `frame` from above
            i3file.push(out_frame)
    except Exception:
        exc_str = ''.join(format_exception(*sys.exc_info()))
        wstdout('ERROR! Could not write to file "%s".\n%s\n'
                % (new_path, exc_str))
        return None
    finally:
        if i3file is not None:
            i3file.close()

    # Best-effort removal of the now-merged source files
    for filepath in files:
        if expand(filepath) == new_path:
            continue
        try:
            remove(filepath)
        except Exception:
            exc_str = ''.join(format_exception(*sys.exc_info()))
            wstdout('ERROR! Could not remove file "%s".\n%s\n'
                    % (filepath, exc_str))

    return new_path
def main():
    """Main: iterate over input files, acquire locks, and schedule the recos
    that fit within the remaining job time."""
    start_time_sec = time.time()
    args = parse_args()

    def _sigint_handler(signal, frame):  # pylint: disable=unused-argument, redefined-outer-name
        wstderr('=' * 79 + '\n')
        wstderr('*** CAUGHT CTL-C (sigint) *** ... attempting to cleanup!\n')
        wstderr('=' * 79 + '\n')
        raise KeyboardInterrupt
    # NOTE(review): _sigint_handler is defined but never installed via
    # signal.signal(); confirm whether registration was intended.

    # Import IceCube things now
    from I3Tray import I3Tray  # pylint: disable=import-error
    from icecube import dataclasses, dataio, icetray, multinest_icetray  # pylint: disable=unused-variable, import-error
    from cluster import get_spline_tables

    lock_info = getProcessInfo()

    # Report run configuration and lock metadata, one "key = value" per line
    wstdout('=' * 79 + '\n')
    for d in [vars(args), lock_info]:
        wstdout('\n')
        wstdout('\n'.join([(('%20s' % k) + ' = %s' % d[k])
                           for k in sorted(d.keys())]))
    wstdout('\n' * 2)

    file_lister = FileLister(infile=args.infile, indir=args.indir)
    event_counter = EventCounter(srt_pulse_name=args.srt_pulse_name,
                                 skip=args.skip, n_events=args.n_events)

    expiration = time.time() + args.seconds_remaining
    expiration_timestamp = timestamp(at=expiration, utc=True)

    while True:
        infile_path = file_lister.get_next_file()
        if infile_path is None:
            wstdout('> No more files that can be processed. Quitting.\n')
            break

        # NOTE: cannot run on a file that has _all_ recos already run, since
        # output file cannot be same as input file (which it will have same
        # name, since the name is derived from recos run / etc.)
        already_run = recosFromPath(infile_path)

        # NOTE: now skipping a reco is determined ONLY by the "If" kwarg, and
        # not by the filename at all (swap the comment on the next line for
        # the line below to change behavior back)
        #recos_not_run_yet = sorted(set(args.requested) - set(already_run))
        recos_not_run_yet = sorted(set(args.requested))
        if not recos_not_run_yet:
            wstdout('> Nothing more to be done on file. Moving on. ("%s")\n'
                    % infile_path)
            continue

        # See if file still exists
        if not isfile(infile_path):
            wstdout('> File no longer exists. Moving on. ("%s")\n'
                    % infile_path)
            continue

        # Skip if empty input files
        if getsize(infile_path) == 0:
            wstdout('> Input file is 0-length. Moving on. ("%s")\n'
                    % infile_path)
            continue

        # NOTE: commenting out the following and forcing an extremely long
        # timeout to allow all recos to run (of which many won't have to,
        # because they've already been run). Uncomment the following three
        # lines and comment out the "time_remaining =" line below to change
        # the behavior back when most or all recos have to be run
        #time_remaining = np.ceil(
        #    args.seconds_remaining - (time.time() - start_time_sec)
        #)
        time_remaining = 3600 * 24 * 10000

        # See if any reco at all fits in the remaining time
        if time_remaining <= MIN_RECO_TIME:
            wstdout('Not enough time to run *any* reco. Quitting.\n')
            break

        # See if any of the recos needing to be run on *this* file fit in the
        # remaining time; register all `reco_num`s that can be run
        recos_to_run = []
        after_proc_time_remaining = time_remaining
        for reco_num in recos_not_run_yet:
            time_limit = RECOS[reco_num]['time_limit']
            if time_limit > after_proc_time_remaining:
                continue
            recos_to_run.append(reco_num)
            after_proc_time_remaining -= time_limit
        time_to_run_processing = time_remaining - after_proc_time_remaining

        # Give the lock an extra minute beyond the strict time to run
        expiration = time.time() + time_to_run_processing + 60
        expiration_timestamp = timestamp(at=expiration, utc=True)

        if not recos_to_run:
            wstdout('Not enough time to run any remaining reco on file.'
                    ' Moving on. ("%s")\n' % infile_path)
            continue

        infile_lock_f, outfile_lock_f = None, None
        infile_lock_path = infile_path + LOCK_SUFFIX
        outfile_lock_path = None

        allrecos = set(recos_to_run).union(already_run)
        outfile_name = basename(pathFromRecos(orig_path=infile_path,
                                              recos=allrecos))
        outfile_path = abspath(expand(join(args.outdir, outfile_name)))
        outfile_lock_path = outfile_path + LOCK_SUFFIX

        if outfile_name == infile_path or outfile_path == infile_path:
            wstdout('Outfile is same as infile, which will lead to removal of'
                    ' infile. Path = "%s" ; Moving on to next input file.\n'
                    % infile_path)
            continue

        lock_info['acquired_at'] = timestamp(utc=True)
        lock_info['expires_at'] = expiration_timestamp
        lock_info['infile'] = infile_path
        lock_info['outfile'] = outfile_path

        if isfile(outfile_path):
            wstdout('> Outfile path exists; will overwrite if both infile and'
                    ' outfile locks can be obtained! ...\n'
                    '> "%s"\n' % outfile_path)

        # NOTE:
        # Create lockfiles (if they don't exist) for each of the infile and
        # outfile, and try to acquire exclusive locks on these before
        # working with either the infile or outfile.
        #
        # Also: write info to the lockfiles to know when it's okay to clean
        # each up manually. Note that the `flock` will be removed by the OS
        # as soon as the lock file is closed or when this process dies.
        lock_info['type'] = 'infile_lock'
        try:
            if args.use_locks:
                infile_lock_f = acquire_lock(infile_lock_path, lock_info)
        except IOError:
            wstdout('> infile lock failed to be obtained.'
                    ' "%s"\n' % infile_lock_path)
            infile_lock_f = None
            continue

        lock_info['type'] = 'outfile_lock'
        try:
            if args.use_locks:
                outfile_lock_f = acquire_lock(outfile_lock_path, lock_info)
        except IOError:
            wstdout('ERROR: outfile lock failed to be obtained.'
                    ' Cleaning up infile lock and moving on.\n'
                    ' "%s" (infile lock)\n'
                    ' "%s" (outfile lock)\n'
                    % (infile_lock_path, outfile_lock_path))
            cleanup_lock_f(infile_lock_f)
            infile_lock_f = None
            continue

        # Ensure the output path is clear before processing. errno 2
        # (ENOENT, "No such file or directory") is fine since the point of
        # `remove` is to guarantee the path doesn't exist.
        try:
            remove(outfile_path)
        except OSError as err:  # bugfix: `except OSError, err` is py2-only
            if err.errno != 2:
                wstdout('> ERROR: obtained locks but outfile path exists and'
                        ' cannot be removed. Cleaning up locks and moving'
                        ' on.\n'
                        '> "%s" (outfile path)\n'
                        '> "%s" (infile_lock_path)\n'
                        '> "%s" (outfile_lock_path)\n'
                        % (outfile_path, infile_lock_path, outfile_lock_path))
                cleanup_lock_f(infile_lock_f)
                infile_lock_f = None
                cleanup_lock_f(outfile_lock_f)
                outfile_lock_f = None
                continue
        except Exception:
            # Unexpected failure: release both locks, then re-raise
            cleanup_lock_f(infile_lock_f)
            infile_lock_f = None
            cleanup_lock_f(outfile_lock_f)
            outfile_lock_f = None
            raise
def parse_args(descr=__doc__):
    """Parse and validate command line arguments.

    Returns
    -------
    args : argparse.Namespace
        Parsed arguments, augmented with derived attributes: `requested`
        (list of reco numbers), `srt_pulse_name`, `geometry`, and
        `seconds_remaining`.

    Raises
    ------
    IOError
        If a specified input path, gcd file, or output dir is invalid.
    ValueError
        On contradictory or missing option combinations.
    """
    parser = ArgumentParser(
        description=descr, formatter_class=ArgumentDefaultsHelpFormatter
    )
    parser.add_argument(
        '--infile', default=None,
        help='''Path to the input file. If specified, do not specify
        --indir'''
    )
    parser.add_argument(
        '--indir', default=None,
        help='''Path to the input directory, from which all I3 files will be
        processed. If specified, do not specify --infile'''
    )
    parser.add_argument(
        '--outdir', required=True,
        help='''Output directory; must not be same as indir (or infile's
        directory), as the source file may be corrupted or removed if an
        error is encountered.''',
    )
    parser.add_argument(
        '--gcd', required=True,
        help='Path to GCD file',
    )
    parser.add_argument(
        '--skip', type=int, default=0,
        help='''Number of events to skip. Note that an "event" is defined as
        a frame containing the SRT_PULSE_NAME.''',
    )
    parser.add_argument(
        '--n-events', type=int, default=0,
        help='''Total number of "events" to process (n-events <= 0 processes
        all events in the file(s) starting from from --skip). Note that an
        event is defined as a frame containing the SRT_PULSE_NAME.''',
    )
    parser.add_argument(
        '--recos', type=str.lower, default='all',
        help='''Reco numbers to run. Specify "all" or a human-readable list,
        like "0-2,5" (which would perform steps 0, 1, 2, and 5). Note that
        indexing begins at 0.''',
    )
    parser.add_argument(
        '--detector', required=True, type=str.lower,
        choices=['deepcore', 'pingu'],
        help='''Detector for which the MC or data was produced (this selects
        an appropriate SRT_PULSE_NAME)'''
    )
    parser.add_argument(
        '--minutes-remaining', type=float, default=np.inf,
        help='''Minutes remaining in a job to run a reco; only those
        reconstructions with time limits less than this will run. Specify
        <= 0 for no limit.'''
    )
    parser.add_argument(
        '--hours-remaining', type=float, default=np.inf,
        help='''Hours remaining in a job to run a reco; only those
        reconstructions with time limits less than this will run. Specify
        <= 0 for no limit.'''
    )
    parser.add_argument(
        '--use-locks', action='store_true',
        help='''EXPERIMENTAL (and flaky): Use file locking to protect files
        from being processed by multiple separate processes.'''
    )
    args = parser.parse_args()

    # Explicit exceptions for validation (asserts are stripped under `-O`)
    if args.skip < 0:
        raise ValueError('--skip must be >= 0; got %d' % args.skip)

    if args.recos == 'all':
        args.requested = list(range(len(RECOS)))
    else:
        args.requested = hrlist2list(args.recos)

    # Exactly one of --infile / --indir must be specified
    num_inspecs = 0
    if args.infile is not None:
        args.infile = abspath(expand(args.infile))
        if not isfile(args.infile):
            raise IOError('`infile` "%s" is not a file.' % args.infile)
        num_inspecs += 1
    if args.indir is not None:
        args.indir = abspath(expand(args.indir))
        if not isdir(args.indir):
            raise IOError('`indir` "%s" is not a directory.' % args.indir)
        num_inspecs += 1
    if num_inspecs != 1:
        raise ValueError(
            'Either --infile or --indir must be specified but not both.'
        )

    if args.infile is not None:
        indir = abspath(dirname(expand(args.infile)))
    else:
        indir = abspath(expand(args.indir))

    args.outdir = abspath(expand(args.outdir))
    if args.outdir == indir:
        raise ValueError(
            'Outdir cannot be same as indir (or if infile is specified,'
            ' directory in which infile resides'
        )
    mkdir(args.outdir, warn=False)
    if not isdir(args.outdir):
        raise IOError('Could not create outdir "%s".' % args.outdir)

    args.gcd = expand(args.gcd)
    if not isfile(args.gcd):
        raise IOError('`gcd` "%s" is not a file.' % args.gcd)

    if args.detector == 'pingu':
        args.srt_pulse_name = 'newSRT_TW_Cleaned_WavedeformPulses'
        args.geometry = 'pingu'
    elif args.detector == 'deepcore':
        args.srt_pulse_name = 'SRTTWOfflinePulsesDC'
        args.geometry = 'deepcore'

    # Normalize the two time-remaining specs into integer seconds, capped at
    # about one year; <= 0 or unspecified means "no limit" (inf before cap)
    if np.isinf(args.minutes_remaining):
        if np.isinf(args.hours_remaining):
            args.seconds_remaining = np.inf
        else:
            args.seconds_remaining = args.hours_remaining * 3600
    else:
        if not np.isinf(args.hours_remaining):
            # If both are given they must agree exactly
            if args.minutes_remaining != args.hours_remaining * 60:
                raise ValueError('--minutes-remaining and --hours-remaining'
                                 ' disagree.')
        args.seconds_remaining = args.minutes_remaining * 60
    if args.seconds_remaining <= 0:
        args.seconds_remaining = np.inf
    args.seconds_remaining = int(
        np.ceil(np.clip(args.seconds_remaining, a_min=0, a_max=31556926)))

    return args