Example no. 1
def update_namelist_wps(template, target, max_dom, init_time, fcst_hours,
                        bdy_interval, geo_em_dir, met_em_dir, geogrid_tbl,
                        metgrid_tbl, prefix, constants_name=None):
    """ Updates the namelist.wps to reflect updated settings in config
    
    Arguments:
        template -- base namelist.wps file to alter
        target   -- name of modified file to write to
        max_dom  -- number of model domains
        init_time, fcst_hours, bdy_interval -- initial time, forecast length (hours)
            and boundary-condition interval (hours), used to set start and end dates
        geo_em_dir, met_em_dir -- output paths for geogrid and metgrid
        geogrid_tbl, metgrid_tbl -- paths used for opt_geogrid_tbl_path and opt_metgrid_tbl_path
        prefix -- ungrib prefix (currently unused here)
        constants_name -- optional constants file for the metgrid section
        
    """    
    # consider refactoring this method, perhaps replacing it with a more general namelist-updating function,
    # and pushing some of the logic into the configuration file?
    logger     = shared.get_logger()
    logger.debug('*** UPDATING namelist.wps ***')

    # function to expand any date/time placeholders
    expand = lambda s : substitute.sub_date(s, init_time=init_time)
    
    #wps_run_dir= expand(target)
    
    bdy_times  = shared.get_bdy_times(init_time, fcst_hours, bdy_interval)
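    # list of boundary-condition times spanning the forecast; the first element is the
    # start time and the last the end time used below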

    met_em_dir = expand(met_em_dir)
    geo_em_dir = expand(geo_em_dir)

    interval_seconds = bdy_interval * 60 * 60
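    # e.g. a 6-hourly boundary interval gives interval_seconds = 6 * 3600 = 21600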

    logger.debug('reading namelist.wps <--------- %s' % template)
    namelist = shared.read_namelist(template)

    #
    # Update some options based on the forecast config file
    #
    namelist.update('max_dom', max_dom)
    namelist.update('opt_output_from_geogrid_path', geo_em_dir, section='share')
    namelist.update('opt_output_from_metgrid_path', met_em_dir, section='metgrid')
    namelist.update('opt_geogrid_tbl_path', expand(geogrid_tbl), section='geogrid')
    namelist.update('opt_metgrid_tbl_path', expand(metgrid_tbl), section='metgrid')
    namelist.update('interval_seconds', [interval_seconds])
    
    #
    # Generate formatted strings for inclusion in the namelist.wps file
    #
    start_str  = bdy_times[0].strftime("%Y-%m-%d_%H:%M:%S")
    end_str    = bdy_times[-1].strftime("%Y-%m-%d_%H:%M:%S")
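    # namelist.wps expects dates in this format, e.g. datetime(2016, 1, 1, 0) -> '2016-01-01_00:00:00'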
    logger.debug("Updating namelist.wps start and end")

    namelist.update('start_date', [start_str]*max_dom)
    namelist.update('end_date',   [end_str]*max_dom)
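    # the written &share section then contains one entry per domain, e.g. (illustrative, max_dom = 2):
    #   start_date = '2016-01-01_00:00:00', '2016-01-01_00:00:00',
    #   end_date   = '2016-01-02_00:00:00', '2016-01-02_00:00:00',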

    if constants_name:
        namelist.update('constants_name', constants_name, section='metgrid')
    
    namelist.to_file(target)
    logger.debug('*** FINISHED UPDATING namelist.wps ***')
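
A minimal usage sketch for the function above. All values below (paths, domain count,
times) are illustrative assumptions, not part of the original module; the real workflow
builds them from the forecast configuration, as Example no. 2 shows. The sketch also
assumes the module-level helpers used inside the function (shared, substitute) are importable.

from datetime import datetime

update_namelist_wps(template='namelist.wps.template',   # hypothetical template file
                    target='wps_run/namelist.wps',      # hypothetical output path
                    max_dom=2,
                    init_time=datetime(2016, 1, 1, 0),
                    fcst_hours=24,
                    bdy_interval=6,
                    geo_em_dir='wps_run/geo_em',
                    met_em_dir='wps_run/met_em',
                    geogrid_tbl='wps_run/geogrid',
                    metgrid_tbl='wps_run/metgrid',
                    prefix='',
                    constants_name=None)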
Example no. 2
def main():
    # merge command-line and file-specified arguments
    config = conf.config(__doc__, sys.argv[1:])

    logger = loghelper.create(LOGGER, log_level=config.get('log.level'), 
                              log_fmt=config.get('log.format'), 
                              log_file=config.get('log.file'))
    
    
    if not os.path.exists(config['namelist_wps']):
        logger.error("No namelist.wps found, %s was specified as template, but does not exist" % config['namelist_wps'])
        sys.exit()
    
    if not os.path.exists(config['namelist_input']):
        logger.error("No namelist.input found, %s was specified as template, but does not exist" % config['namelist_input'])
        sys.exit()
    
    
    dry_run = config.get('dry-run')
    rmtree = config.get('rmtree')
    max_dom = config['max_dom']
    bdy_interval = config['bdy_interval']
    fcst_hours = config['fcst_hours']
    logger.debug(fcst_hours)
    

    history_interval = config['history_interval'] 
    link_boundaries = config.get('link-boundaries')
    
    # either the start time is exactly specified, or else we calculate it
    if config.get('start'):
        init_time = config['start']
    else:
        init_time = shared.get_time(base_time=config.get('base-time'), delay=config.get('delay'), round=config.get('cycles'))

    if config.get('end'):
        end_init = config['end']
        logger.debug(end_init)
        init_interval = config['init_interval']
        init_times = list(rrule.rrule(freq=rrule.HOURLY, interval=init_interval, dtstart=init_time, until=end_init))
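        # e.g. init_interval=24 gives one initialisation time per day, from init_time
        # up to end_init (inclusive when end_init falls on the interval)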
    else:
        init_times = [init_time]

    for init_time in init_times:
        try:
            logger.info("**** Running simulation for %s *****" % init_time)
            
            # one-argument function to do initial-time substitution in strings
            expand = lambda s : substitute.sub_date(s, init_time=init_time) if isinstance(s, str) else s

            date_replacements = substitute.date_replacements(init_time=init_time)
            
            working_dir = expand(config['working_dir'])
            logger.info("working dir: %s " % working_dir)

            if rmtree:
                safe_remove(working_dir, dry_run)
            
            create_directory_structure(expand, remove=config.get('prepare.remove'),
                                       create=config.get('prepare.create'),
                                       copy=config.get('prepare.copy'),
                                       link=config.get('prepare.link'), dry_run=dry_run)
            if config.get('prepare.template'):
                for entry in config['prepare.template']:
                    tokens = expand(entry).split()
                    source = tokens[0]
                    target = tokens[1] if len(tokens)>1 else tokens[0]
                    templater.fill_template(source, target, date_replacements)

            bdy_times = shared.get_bdy_times(init_time, fcst_hours, bdy_interval)
            
            working_namelist = working_dir+"/namelist.wps"
        
            # this can be made cleaner
            prefix=''
            update_namelist_wps(config['namelist_wps'], working_namelist, config['max_dom'], 
                                init_time, fcst_hours, config['bdy_interval'], 
                                config['geo_em_dir'], config['met_em_dir'], config['geogrid_run_dir'], config['metgrid_run_dir'], prefix, 
                                config.get('constants_name'))
        
            working_namelist = working_dir+"/namelist.input"
            logger.debug(fcst_hours)
            
            update_namelist_input(config['namelist_input'], working_namelist, max_dom, init_time,
                                  fcst_hours, history_interval, bdy_interval*60*60,
                                  metadata=config.get('metadata'))
            logger.debug(fcst_hours)
            
            # apply any additional specified namelist updates (consider getting rid of this section)
            namelist_updates = config.get('namelist_updates')
            if namelist_updates:
                for key in sorted(namelist_updates.keys()):
                    entry = namelist_updates[key]
                    logger.debug('processing namelist update entry %s' % key)
                    
                    template = expand(entry['template'])
                    target = expand(entry['target'])
                    logger.debug('%s\t---->\t%s' %(template.ljust(20), target.ljust(20)))
                    namelist = shared.read_namelist(template)
                    if entry.get('update'):
                        for old,new in entry['update'].items():
                            logger.debug('\t%s\t:\t%s' %(old.ljust(20), expand(new).ljust(20)))
                            namelist.update(old,expand(new))
                    namelist.to_file(target)

            # link in input files for all ungrib jobs
            # update namelist.wps to modify start and end time
            
            if config.get('ungrib'):
                for key,entry in config['ungrib'].items():
                    # apply any delay and rounding to the init_time to get the correct time for this dataset
                    # note that sometimes it is necessary to use a different time, e.g. the SST field may be delayed by one day
                    run_dir = expand(entry['run_dir'])
                    base_time = shared.get_time(init_time, delay=entry.get('delay'), round=entry.get('cycles'))
                    ungrib_len = int(entry['ungrib_len'])
                    bdy_times = shared.get_bdy_times(base_time, ungrib_len, bdy_interval)
                    namelist = shared.read_namelist(run_dir+"/namelist.wps")

                    start_str  = base_time.strftime("%Y-%m-%d_%H:%M:%S")
                    end_str    = bdy_times[-1].strftime("%Y-%m-%d_%H:%M:%S")

                    namelist.update('start_date', [start_str]*max_dom)
                    namelist.update('end_date',   [end_str]*max_dom)
                    namelist.to_file(run_dir+"/namelist.wps")
                    
                    # link in vtable
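                    # the Vtable tells ungrib which GRIB fields of this dataset to decode and how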
                    vtable = entry['vtable']
                    cmd = "%s %s/Vtable" % (vtable, run_dir)
                    shared.link(cmd, dry_run=dry_run)
               

                    if link_boundaries:
               
                        file_pattern = entry['files']
                        
                        # create an ordered set to ensure filenames only appear once
                        filenames = shared.ordered_set([substitute.sub_date(file_pattern, init_time=base_time, valid_time=t) for t in bdy_times])
                        missing_files = []
                        for f in filenames:
                            if not os.path.exists(f): 
                                missing_files.append(f)
                                logger.error("%s \t missing" % f)
                                
                        if missing_files:
                            if rmtree:
                                safe_remove(working_dir, dry_run)
                            raise IOError("some files could not be found")

                        args = ' '.join(filenames)
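                        # link_grib.csh creates the GRIBFILE.AAA, GRIBFILE.AAB, ... symlinks that ungrib reads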
                        cmd = '%s/link_grib.csh %s' %(run_dir,args)
                        shared.run_cmd(cmd, dry_run=dry_run, cwd=run_dir, log=False)

        except IOError as e:
            logger.error(e)
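
Assuming main() lives in a standalone script invoked from the command line (which the
use of sys.argv and the module docstring passed to conf.config suggest), the usual
entry-point guard would look like the sketch below; it is not shown in the excerpt above.

if __name__ == "__main__":
    main()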