def create_directory_structure(expand, remove=None, create=None, copy=None, link=None, dry_run=False):
    """Creates a subdirectory structure, and copies, moves, and links in files.

    Arguments:
        expand  -- a single-argument function to perform any string
                   substitutions on any of the input arguments
        remove  -- a list of file patterns to remove
        create  -- a list of subdirectories to create if they don't already exist
        copy    -- a list of file patterns to copy
        link    -- a list of file patterns to link
        dry_run -- log rather than execute commands

    Each pattern/path is passed through `expand` before being handed to the
    corresponding `shared` helper; actions run in the order
    create, remove, copy, link.
    """
    # note: removed an unused `logger = loghelper.get(LOGGER)` local —
    # this function never logged anything itself
    if create:
        for d in create:
            shared.create(expand(d), dry_run=dry_run)

    if remove:
        for pattern in remove:
            shared.remove(expand(pattern), dry_run=dry_run)

    if copy:
        for pattern in copy:
            shared.copy(expand(pattern), dry_run=dry_run)

    if link:
        for pattern in link:
            shared.link(expand(pattern), dry_run=dry_run)
def main():
    """Initialise the base directory: create/remove/copy/link the configured
    file patterns, then print follow-up instructions for the user."""
    # merge command-line and file-specified arguments
    config = conf.config(__doc__, sys.argv[1:])

    logger = loghelper.create(LOGGER,
                              log_level=config.get('log.level'),
                              log_fmt=config.get('log.format'),
                              log_file=config.get('log.file'))

    base_dir = config['base_dir']
    wrftools_dir = config['wrftools_dir']
    dry_run = config.get('dry_run')
    jobs = config.get('jobs')

    # apply each initialisation action to its configured pattern list,
    # preserving the original order: create, remove, copy, link
    actions = [('initialise.create', shared.create),
               ('initialise.remove', shared.remove),
               ('initialise.copy', shared.copy),
               ('initialise.link', shared.link)]

    for option, action in actions:
        patterns = config.get(option)
        if patterns:
            for pattern in patterns:
                action(pattern, dry_run=dry_run)

    logger.debug("init.py done")

    print("\n\n")
    print("************************************************")
    print(NEXT_STEPS % (base_dir, base_dir))
    print("************************************************")
def main():
    """Set up the configured directory layout (create, remove, copy, link)
    and tell the user what to do next."""
    # merge command-line and file-specified arguments
    config = conf.config(__doc__, sys.argv[1:])

    logger = loghelper.create(LOGGER,
                              log_level=config.get('log.level'),
                              log_fmt=config.get('log.format'),
                              log_file=config.get('log.file'))

    base_dir = config['base_dir']
    wrftools_dir = config['wrftools_dir']
    dry_run = config.get('dry_run')
    jobs = config.get('jobs')

    create = config.get('initialise.create')
    remove = config.get('initialise.remove')
    copy = config.get('initialise.copy')
    link = config.get('initialise.link')

    # run each (entries, handler) pair in order, skipping unset options
    for entries, handler in ((create, shared.create),
                             (remove, shared.remove),
                             (copy, shared.copy),
                             (link, shared.link)):
        if not entries:
            continue
        for entry in entries:
            handler(entry, dry_run=dry_run)

    logger.debug("init.py done")

    print("\n\n")
    print("************************************************")
    print(NEXT_STEPS % (base_dir, base_dir))
    print("************************************************")
def main():
    """Prepare per-initial-time working directories for a WRF run.

    For each initial time in the configured range this: expands date
    placeholders in paths, builds the working directory structure, fills
    templates, updates namelist.wps and namelist.input, applies any extra
    namelist updates, and links in ungrib input files and Vtables.
    Missing boundary files raise IOError, which is caught and logged so the
    remaining initial times still run.
    """
    # merge command-line and file-specified arguments
    config = conf.config(__doc__, sys.argv[1:])
    logger = loghelper.create(LOGGER,
                              log_level=config.get('log.level'),
                              log_fmt=config.get('log.format'),
                              log_file=config.get('log.file'))

    if not os.path.exists(config['namelist_wps']):
        logger.error("No namelist.wps found, %s was specifed as template, but does not exist" % config['namelist_wps'])
        sys.exit()

    if not os.path.exists(config['namelist_input']):
        # BUG FIX: this previously read config['namlist_input'] (typo), which
        # raised KeyError on the error path instead of logging the message
        logger.error("No namelist.input found, %s was specifed as template, but does not exist" % config['namelist_input'])
        sys.exit()

    dry_run = config.get('dry-run')
    rmtree = config.get('rmtree')
    max_dom = config['max_dom']
    bdy_interval = config['bdy_interval']
    fcst_hours = config['fcst_hours']
    logger.debug(fcst_hours)
    history_interval = config['history_interval']
    link_boundaries = config.get('link-boundaries')

    # either the start time is exactly specified, or else we calculate it
    if config.get('start'):
        init_time = config['start']
    else:
        init_time = shared.get_time(base_time=config.get('base-time'),
                                    delay=config.get('delay'),
                                    round=config.get('cycles'))

    if config.get('end'):
        end_init = config['end']
        logger.debug(end_init)
        init_interval = config['init_interval']
        init_times = list(rrule.rrule(freq=rrule.HOURLY, interval=init_interval,
                                      dtstart=init_time, until=end_init))
    else:
        init_times = [init_time]

    for init_time in init_times:
        try:
            logger.info("**** Running simulation for %s *****" % init_time)

            # one-argument function to do initial-time substitution in strings;
            # non-string values pass through unchanged
            expand = lambda s: substitute.sub_date(s, init_time=init_time) if isinstance(s, str) else s
            date_replacements = substitute.date_replacements(init_time=init_time)

            working_dir = expand(config['working_dir'])
            logger.info("working dir: %s " % working_dir)

            if rmtree:
                safe_remove(working_dir, dry_run)

            create_directory_structure(expand,
                                       remove=config.get('prepare.remove'),
                                       create=config.get('prepare.create'),
                                       copy=config.get('prepare.copy'),
                                       link=config.get('prepare.link'),
                                       dry_run=dry_run)

            # each template entry is "source [target]"; target defaults to source
            if config.get('prepare.template'):
                for entry in config['prepare.template']:
                    tokens = expand(entry).split()
                    source = tokens[0]
                    target = tokens[1] if len(tokens) > 1 else tokens[0]
                    templater.fill_template(source, target, date_replacements)

            bdy_times = shared.get_bdy_times(init_time, fcst_hours, bdy_interval)

            working_namelist = working_dir + "/namelist.wps"

            # this can be made cleaner
            prefix = ''
            update_namelist_wps(config['namelist_wps'], working_namelist,
                                config['max_dom'], init_time, fcst_hours,
                                config['bdy_interval'], config['geo_em_dir'],
                                config['met_em_dir'], config['geogrid_run_dir'],
                                config['metgrid_run_dir'], prefix,
                                config.get('constants_name'))

            working_namelist = working_dir + "/namelist.input"
            logger.debug(fcst_hours)
            update_namelist_input(config['namelist_input'], working_namelist,
                                  max_dom, init_time, fcst_hours,
                                  history_interval, bdy_interval * 60 * 60,
                                  metadata=config.get('metadata'))
            logger.debug(fcst_hours)

            # apply any additional specified namelist updates (consider getting rid of this section)
            namelist_updates = config.get('namelist_updates')
            if namelist_updates:
                # sorted keys give a deterministic application order
                for key in sorted(namelist_updates.keys()):
                    entry = namelist_updates[key]
                    logger.debug('processing namelist update entry %s' % key)
                    template = expand(entry['template'])
                    target = expand(entry['target'])
                    logger.debug('%s\t---->\t%s' % (template.ljust(20), target.ljust(20)))
                    namelist = shared.read_namelist(template)
                    if entry.get('update'):
                        for old, new in entry['update'].items():
                            logger.debug('\t%s\t:\t%s' % (old.ljust(20), expand(new).ljust(20)))
                            namelist.update(old, expand(new))
                    namelist.to_file(target)

            # link in input files for all ungrib jobs
            # update namelist.wps to modify start and end time
            if config.get('ungrib'):
                for key, entry in config['ungrib'].items():
                    # apply any delay and rounding to the init_time to get the
                    # correct time for this dataset; note that sometimes it is
                    # necessary to use a different time, e.g. an SST field
                    # delayed by one day
                    run_dir = expand(entry['run_dir'])
                    base_time = shared.get_time(init_time,
                                                delay=entry.get('delay'),
                                                round=entry.get('cycles'))
                    ungrib_len = int(entry['ungrib_len'])
                    bdy_times = shared.get_bdy_times(base_time, ungrib_len, bdy_interval)

                    namelist = shared.read_namelist(run_dir + "/namelist.wps")
                    start_str = base_time.strftime("%Y-%m-%d_%H:%M:%S")
                    end_str = bdy_times[-1].strftime("%Y-%m-%d_%H:%M:%S")
                    namelist.update('start_date', [start_str] * max_dom)
                    namelist.update('end_date', [end_str] * max_dom)
                    namelist.to_file(run_dir + "/namelist.wps")

                    # link in vtable
                    vtable = entry['vtable']
                    cmd = "%s %s/Vtable" % (vtable, run_dir)
                    shared.link(cmd, dry_run=dry_run)

                    if link_boundaries:
                        file_pattern = entry['files']
                        # create an ordered set to ensure filenames only appear once
                        filenames = shared.ordered_set(
                            [substitute.sub_date(file_pattern, init_time=base_time, valid_time=t)
                             for t in bdy_times])

                        missing_files = []
                        for f in filenames:
                            if not os.path.exists(f):
                                missing_files.append(f)
                                logger.error("%s \t missing" % f)

                        if missing_files:
                            # clean up the partial working dir before aborting this init time
                            if rmtree:
                                safe_remove(working_dir, dry_run)
                            raise IOError("some files could not be found")

                        args = ' '.join(filenames)
                        cmd = '%s/link_grib.csh %s' % (run_dir, args)
                        shared.run_cmd(cmd, dry_run=dry_run, cwd=run_dir, log=False)

        except IOError as e:
            # log and continue with the next initial time
            logger.error(e)