Example #1
def timing(config):
    """Reads a rsl file from WRF and works out timing information
    from that """

    #
    # Where should we expect to find the rsl file?
    # In the wrf_run_dir
    #
    logger = shared.get_logger()
    logger.info('*** Computing timing information ***')
    wrf_run_dir = config['wrf_run_dir']
    rsl_file = '%s/rsl.error.0000' % wrf_run_dir
    namelist_input = config['namelist_input']
    namelist = shared.read_namelist(namelist_input).settings
    timestep = namelist['time_step'][0]
    with open(rsl_file, 'r') as f:
        lines = f.read().split('\n')

    # get timings on outer domain
    main_times = [
        float(l.split()[8]) for l in lines
        if re.search("^Timing for main: time .* 1:", l)
    ]
    total = sum(main_times)
    steps = len(main_times)
    time_per_step = (total / steps)
    x_real = timestep / time_per_step
    logger.info('\n*** TIMING INFORMATION ***')
    logger.info('\t %d outer timesteps' % steps)
    logger.info('\t %0.3f elapsed seconds' % total)
    logger.info('\t %0.3f seconds per timestep' % time_per_step)
    logger.info('\t %0.3f times real time' % x_real)
    logger.info('*** END TIMING INFORMATION ***\n')
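A minimal call sketch for the function above, assuming the module-level imports it relies on (`re`, `shared`) are available and that wrf.exe has already written `rsl.error.0000` into the run directory; the paths below are placeholders:

# Hypothetical config; only the keys read by timing() are shown.
config = {
    'wrf_run_dir': '/path/to/wrf/run',            # directory containing rsl.error.0000
    'namelist_input': '/path/to/namelist.input',  # read to obtain time_step
}
timing(config)  # logs timestep count, elapsed seconds, seconds per step and speed vs. real time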
Example #2
def timing(config):
    """Reads a rsl file from WRF and works out timing information
    from that """

    #
    # Where should we expect to find the rsl file?
    # In the wrf_run_dir
    #
    logger = shared.get_logger()
    logger.info('*** Computing timing information ***')
    wrf_run_dir    = config['wrf_run_dir']
    rsl_file       = '%s/rsl.error.0000' % wrf_run_dir
    namelist_input = config['namelist_input']
    namelist       = shared.read_namelist(namelist_input).settings
    timestep       = namelist['time_step'][0]
    with open(rsl_file, 'r') as f:
        lines = f.read().split('\n')

    # get timings on outer domain
    main_times  = [float(l.split()[8]) for l in lines if re.search("^Timing for main: time .* 1:", l)]
    total       = sum(main_times)
    steps       = len(main_times)
    time_per_step = (total/steps)
    x_real       = timestep / time_per_step
    logger.info('\n*** TIMING INFORMATION ***')
    logger.info('\t %d outer timesteps' % steps)
    logger.info('\t %0.3f elapsed seconds' % total)
    logger.info('\t %0.3f seconds per timestep' % time_per_step)
    logger.info('\t %0.3f times real time' % x_real)
    logger.info('*** END TIMING INFORMATION ***\n')
Example #3
def update_namelist_wps(config):
    """ Updates the namelist.wps to reflect updated settings in config
    
    Arguments:
    config -- dictionary containing various configuration options
        
    """
    logger = shared.get_logger()
    logger.debug('*** UPDATING namelist.wps ***')

    #domain_dir = config['domain_dir']
    #model_run  = config['model_run']
    #wps_dir    = config['wps_dir']
    wps_run_dir = config['wps_run_dir']  # required for opt_geogrid_tbl_path
    #bdy_conditions = config['bdy_conditions']

    namelist_wps = config['namelist_wps']
    shutil.copyfile(namelist_wps, namelist_wps + '.backup')

    bdy_times = shared.get_bdy_times(config)

    max_dom = config['max_dom']
    init_time = config['init_time']

    met_em_dir = shared.sub_date(config['met_em_dir'], init_time=init_time)
    geo_em_dir = config['geo_em_dir']

    bdy_interval = config['bdy_interval']
    interval_seconds = bdy_interval * 60 * 60

    logger.debug('reading namelist.wps <--------- %s' % namelist_wps)
    namelist = shared.read_namelist(namelist_wps)

    #
    # Update some options based on the forecast config file
    #
    namelist.update('max_dom', max_dom)
    namelist.update('opt_output_from_geogrid_path',
                    geo_em_dir,
                    section='share')
    namelist.update('opt_geogrid_tbl_path', wps_run_dir, section='geogrid')
    namelist.update('opt_metgrid_tbl_path', wps_run_dir, section='metgrid')
    namelist.update('interval_seconds', [interval_seconds])

    #
    # Generate formatted strings for inclusion in the namelist.wps file
    #
    start_str = bdy_times[0].strftime("%Y-%m-%d_%H:%M:%S")
    end_str = bdy_times[-1].strftime("%Y-%m-%d_%H:%M:%S")
    logger.debug("Updating namelist.wps start and end")
    logger.debug(start_str)
    logger.debug(end_str)

    namelist.update('start_date', [start_str] * max_dom)
    namelist.update('end_date', [end_str] * max_dom)

    logger.debug('writing modified namelist.wps to file')
    namelist.to_file(namelist_wps)
    logger.debug('*** FINISHED UPDATING namelist.wps ***')
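A hedged configuration sketch for the function above; the keys mirror the lookups in the code, the paths are placeholders, and `shared.get_bdy_times` and `shared.sub_date` may require additional keys or a different date-placeholder syntax than the one assumed here:

import datetime

# Hypothetical config; values are illustrative only.
config = {
    'wps_run_dir':  '/path/to/wps/run',
    'namelist_wps': '/path/to/namelist.wps',
    'max_dom':      2,
    'init_time':    datetime.datetime(2013, 4, 24, 0),
    'met_em_dir':   '/path/to/met_em/%Y-%m-%d_%H',  # date fields substituted by shared.sub_date (syntax assumed)
    'geo_em_dir':   '/path/to/geo_em',
    'bdy_interval': 3,                               # hours; written out as interval_seconds
}
update_namelist_wps(config)   # backs up namelist.wps, then rewrites it in place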
Example #4
def update_namelist_wps(config):
    """ Updates the namelist.wps to reflect updated settings in config
    
    Arguments:
    config -- dictionary containing various configuration options
        
    """    
    logger     = shared.get_logger()
    logger.debug('*** UPDATING namelist.wps ***')

    #domain_dir = config['domain_dir']
    #model_run  = config['model_run']
    #wps_dir    = config['wps_dir']
    wps_run_dir = config['wps_run_dir']         # required for opt_geogrid_tbl_path
    #bdy_conditions = config['bdy_conditions'] 
    
    namelist_wps = config['namelist_wps']
    shutil.copyfile(namelist_wps, namelist_wps+'.backup')
    
    bdy_times  = shared.get_bdy_times(config)
    
    max_dom    = config['max_dom']
    init_time  = config['init_time']

    met_em_dir = shared.sub_date(config['met_em_dir'], init_time=init_time)
    geo_em_dir = config['geo_em_dir']

    bdy_interval = config['bdy_interval']
    interval_seconds = bdy_interval * 60 * 60

    logger.debug('reading namelist.wps <--------- %s' % namelist_wps)
    namelist = shared.read_namelist(namelist_wps)

    #
    # Update some options based on the forecast config file
    #
    namelist.update('max_dom', max_dom)
    namelist.update('opt_output_from_geogrid_path', geo_em_dir, section='share')
    namelist.update('opt_geogrid_tbl_path', wps_run_dir, section='geogrid')
    namelist.update('opt_metgrid_tbl_path', wps_run_dir, section='metgrid')
    namelist.update('interval_seconds', [interval_seconds])
    
    #
    # Generate formatted strings for inclusion in the namelist.wps file
    #
    start_str  = bdy_times[0].strftime("%Y-%m-%d_%H:%M:%S")
    end_str    = bdy_times[-1].strftime("%Y-%m-%d_%H:%M:%S")
    logger.debug("Updating namelist.wps start and end")
    logger.debug(start_str)
    logger.debug(end_str)

    namelist.update('start_date', [start_str]*max_dom)
    namelist.update('end_date',   [end_str]*max_dom)

        
    logger.debug('writing modified namelist.wps to file')
    namelist.to_file(namelist_wps)
    logger.debug('*** FINISHED UPDATING namelist.wps ***')
Example #5
def run_metgrid(config):
    """ Runs metgrid.exe and checks output was sucessful
    
    Arguments:
    config -- dictionary specifying configuration options
    
    """
    logger = shared.get_logger()
    logger.info("\n*** RUNNING METGRID ***")

    queue = config['queue']
    wps_run_dir = config['wps_run_dir']
    log_file = '%s/metgrid.log' % wps_run_dir
    bdy_conditions = config['bdy_conditions']
    namelist_wps = config['namelist_wps']
    namelist = shared.read_namelist(namelist_wps)

    met_em_dir = shared.sub_date(config['met_em_dir'], config['init_time'])

    #
    # vtable may be a dictionary to support running ungrib multiple
    # times. In which case, we need to put multiple prefixes into
    # the namelist.wps file
    #

    vtable = config['vtable']

    if type(vtable) == type({}):
        prefixes = vtable.keys()
    else:
        prefixes = [bdy_conditions]

    namelist.update('fg_name', prefixes)
    namelist.update('opt_output_from_metgrid_path',
                    met_em_dir,
                    section='metgrid')
    if not config['sst']:
        namelist.remove('constants_name')

    namelist.to_file(namelist_wps)

    logger.debug('met_em_dir: %s' % met_em_dir)
    if not os.path.exists(met_em_dir):
        logger.debug('creating met_em_dir: %s ' % met_em_dir)
        os.makedirs(met_em_dir)

    os.chdir(wps_run_dir)
    cmd = "%s/metgrid.exe" % wps_run_dir

    shared.run(cmd, config, from_dir=wps_run_dir)

    cmd = 'grep "Successful completion" %s/metgrid.log*' % wps_run_dir
    ret = shared.run_cmd(cmd, config)
    if ret != 0:
        raise IOError('metgrid.exe did not complete')

    logger.info('*** SUCCESS METGRID ***\n')
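A brief sketch of the two shapes `vtable` can take, matching the comment in the code above; everything else is a placeholder, and further keys may be needed by `shared.run` and `shared.run_cmd` (for example job submission settings behind 'queue'):

import datetime

# Hypothetical config; single boundary-condition source, so fg_name
# falls back to bdy_conditions.
config = {
    'queue':          None,
    'wps_run_dir':    '/path/to/wps/run',
    'bdy_conditions': 'GFS',
    'namelist_wps':   '/path/to/namelist.wps',
    'met_em_dir':     '/path/to/met_em',
    'init_time':      datetime.datetime(2013, 4, 24, 0),
    'sst':            False,
    'vtable':         '/path/to/Vtable.GFS',
}

# Multiple sources: vtable becomes a dict and its keys are used as the fg_name prefixes.
config['vtable'] = {'GFS': '/path/to/Vtable.GFS', 'SST': '/path/to/Vtable.SST'}

run_metgrid(config)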
Example #6
def run_metgrid(config):
    """ Runs metgrid.exe and checks output was sucessful
    
    Arguments:
    config -- dictionary specifying configuration options
    
    """
    logger = shared.get_logger()
    logger.info("\n*** RUNNING METGRID ***")
    
    queue          = config['queue']
    wps_run_dir    = config['wps_run_dir']
    log_file       = '%s/metgrid.log' % wps_run_dir
    bdy_conditions = config['bdy_conditions']
    namelist_wps   = config['namelist_wps']
    namelist       = shared.read_namelist(namelist_wps)
    
    met_em_dir     = shared.sub_date(config['met_em_dir'], config['init_time'])        
    
    #
    # vtable may be a dictionary to support running ungrib multiple
    # times. In which case, we need to put multiple prefixes into
    # the namelist.wps file
    #
    
    vtable = config['vtable']
    
    if type(vtable)==type({}):
        prefixes = vtable.keys()
    else:
        prefixes = [bdy_conditions]    

        
    namelist.update('fg_name', prefixes)
    namelist.update('opt_output_from_metgrid_path', met_em_dir, section='metgrid')
    if not config['sst']:
        namelist.remove('constants_name')
        
    namelist.to_file(namelist_wps)
    
    logger.debug('met_em_dir: %s' % met_em_dir)
    if not os.path.exists(met_em_dir):
        logger.debug('creating met_em_dir: %s ' % met_em_dir)
        os.makedirs(met_em_dir)

    os.chdir(wps_run_dir)
    cmd      =  "%s/metgrid.exe" % wps_run_dir
    
    shared.run(cmd, config, from_dir=wps_run_dir)

    cmd = 'grep "Successful completion" %s/metgrid.log*' % wps_run_dir
    ret = shared.run_cmd(cmd, config)
    if ret != 0:
        raise IOError('metgrid.exe did not complete')
    
    logger.info('*** SUCCESS METGRID ***\n')
Example #7
def update_namelist_input(config):
    """ Updates the namelist.input file to reflect updated settings in config.
    Adds a non-standard &metadata section to give a name to the model run
    
    Arguments:
    config -- dictionary containing various configuration options
        
    """
    logger = shared.get_logger()
    logger.debug('*** UPDATING namelist.input ***')

    #wrf_dir       = config['wrf_dir']
    working_dir = config['working_dir']
    model = config['model']
    model_run = config['model_run']

    domain = config['domain']

    namelist_run = '%s/namelist.input' % working_dir
    namelist_input = config['namelist_input']
    namelist_wps = config['namelist_wps']
    shutil.copyfile(namelist_input, namelist_input + '.backup')

    # read settings from domain-based namelist
    namelist = shared.read_namelist(namelist_input)

    # read settings from domain-based namelist.wps
    #namelist_wps   = shared.read_namelist(namelist_wps)
    #wps_settings   = namelist_wps.settings

    #
    # Add new metadata section to namelist.input
    #
    logger.debug('Adding metadata to the namelist.input')
    logger.debug('domain = %s' % domain)
    logger.debug('model = %s' % model)
    logger.debug('model_run = %s' % model_run)
    namelist.update('domain', domain, 'metadata')
    namelist.update('model', model, 'metadata')
    namelist.update('model_run', model_run, 'metadata')

    #
    # Overrule max_dom with the one in config
    #
    max_dom = config['max_dom']
    namelist.update('max_dom', max_dom)
    #logger.debug("Syncing dx and dy between namelist.wps and namelist.input")
    #dx = wps_settings['dx']
    #dy = wps_settings['dy']
    #logger.debug("namleist.wps: dx: %s ------> namelist.input" % dx)
    #logger.debug("namleist.wps: dy: %s ------> namelist.input" % dy)
    #namelist.update('dx', wps_settings['dx'])
    #namelist.update('dy', wps_settings['dy'])
    fcst_hours = config['fcst_hours']
    fcst_times = shared.get_fcst_times(config)
    history_interval = config['history_interval']
    bdy_interval = config['bdy_interval']  # this is in hours

    interval_seconds = 60 * 60 * bdy_interval

    start = fcst_times[0]
    end = fcst_times[-1]
    diff = end - start

    #
    # I'm still not sure how the WRF namelist works between
    # the start and end settings and the run_XX settings.
    # I think we can just keep days as zero, and work entirely
    # in hours
    #
    namelist.update('start_year', [start.year] * max_dom)
    namelist.update('start_month', [start.month] * max_dom)
    namelist.update('start_day', [start.day] * max_dom)
    namelist.update('start_hour', [start.hour] * max_dom)
    namelist.update('end_year', [end.year] * max_dom)
    namelist.update('end_month', [end.month] * max_dom)
    namelist.update('end_day', [end.day] * max_dom)
    namelist.update('end_hour', [end.hour] * max_dom)
    namelist.update('run_days', [0])
    namelist.update('run_hours', [fcst_hours])
    #namelist.update('run_minutes',[0])
    namelist.update('history_interval', [history_interval] * max_dom)
    namelist.update('interval_seconds', [interval_seconds])

    #
    # If DFI is being used, update DFI settings
    # From user guide:
    # "For time specification, it typically needs to integrate
    # backward for 0.5 to 1 hour, and integrate forward for half of the time."
    #
    # should we just write this every time into the file and rely on dfi_opt
    # as the on/off switch?
    #
    hour = datetime.timedelta(0, 60 * 60)
    minute = datetime.timedelta(0, 60)
    #dfi_bck      = config['dfi_bck'] * minute
    #dfi_fwd      = config['dfi_fwd'] * minute
    #dfi_bckstop  = start - dfi_bck
    #dfi_fwdstop  = start + dfi_fwd

    #namelist.update('dfi_bckstop_year',   dfi_bckstop.year,   'dfi_control')
    #namelist.update('dfi_bckstop_month',  dfi_bckstop.month,  'dfi_control')
    #namelist.update('dfi_bckstop_day',    dfi_bckstop.day,    'dfi_control')
    #namelist.update('dfi_bckstop_hour',   dfi_bckstop.hour,   'dfi_control')
    #namelist.update('dfi_bckstop_minute', dfi_bckstop.minute, 'dfi_control')
    #namelist.update('dfi_bckstop_second', dfi_bckstop.second, 'dfi_control')
    #namelist.update('dfi_fwdstop_year',   dfi_fwdstop.year,   'dfi_control')
    #namelist.update('dfi_fwdstop_month',  dfi_fwdstop.month,  'dfi_control')
    #namelist.update('dfi_fwdstop_day',    dfi_fwdstop.day,    'dfi_control')
    #namelist.update('dfi_fwdstop_hour',   dfi_fwdstop.hour,   'dfi_control')
    #namelist.update('dfi_fwdstop_minute', dfi_fwdstop.minute, 'dfi_control')
    #namelist.update('dfi_fwdstop_second', dfi_fwdstop.second, 'dfi_control')

    logger.debug('writing new settings to file')
    namelist.to_file(namelist_input)

    #logger.debug(namelist)
    logger.debug('*** FINISHED UPDATING namelist.input ***')
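A hedged sketch of the keys read by the function above; the values are illustrative, units follow the comments in the code (`bdy_interval` in hours), and `shared.get_fcst_times` may require keys beyond those shown:

config = {
    'working_dir':      '/path/to/working_dir',
    'model':            'WRF',
    'model_run':        'forecast',
    'domain':           'my_domain',
    'namelist_input':   '/path/to/namelist.input',
    'namelist_wps':     '/path/to/namelist.wps',
    'max_dom':          2,
    'fcst_hours':       48,
    'history_interval': 60,   # assumed to be in minutes, as in a standard WRF namelist
    'bdy_interval':     3,    # hours
}
update_namelist_input(config)   # backs up namelist.input, adds &metadata, updates start/end times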
Example #8
def run_ungrib(config):
    """ Runs ungrib.exe and checks output was sucessfull
    If vtable and gbr_input_fmt are NOT dictionaries, 
    then dictionarius will be constructed from them using 
    the key bdy_conditions from the metadata
    
    Arguments:
    config -- dictionary specifying configuration options
    
    """
    logger = shared.get_logger()
    wps_dir = config['wps_dir']
    wps_run_dir = config['wps_run_dir']
    namelist_wps = config['namelist_wps']
    working_dir = config['working_dir']
    met_em_dir = config['met_em_dir']
    init_time = config['init_time']
    log_file = '%s/ungrib.log' % wps_run_dir
    vtable = config['vtable']
    grb_input_fmt = config['grb_input_fmt']
    grb_input_delay = config.get(
        "grb_input_delay")  # this allows None to be returned

    bdy_conditions = config['bdy_conditions']

    logger.info("\n*** RUNNING UNGRIB ***")

    namelist = shared.read_namelist(namelist_wps)

    bdy_times = shared.get_bdy_times(config)

    if type(grb_input_fmt) != type({}):
        grb_input_fmt = {bdy_conditions: grb_input_fmt}

    if type(vtable) != type({}):
        vtable = {bdy_conditions: vtable}

    #
    # Check that boundary conditions exist
    #
    for key in vtable.keys():

        if grb_input_delay and key in grb_input_delay:
            logger.debug("applying delay")
            delay = datetime.timedelta(0, grb_input_delay[key] * 60 * 60)
            new_bdy_times = [b - delay for b in bdy_times]
        else:
            logger.debug("no delay applied")
            new_bdy_times = bdy_times

        fmt = grb_input_fmt[key]
        #
        # Generate filelist based on the initial time, and the forecast hour
        #
        filelist = list(
            OrderedDict.fromkeys(shared.get_bdy_filenames(fmt, new_bdy_times)))

        #
        # Check the boundary files exist
        #
        logger.debug('checking boundary condition files exist')
        for f in filelist:
            if not os.path.exists(f):
                raise IOError('cannot find file: %s' % f)

    logger.debug('all boundary conditions files exist')

    #
    # Now process boundary conditions
    #
    for key in vtable.keys():

        if grb_input_delay and key in grb_input_delay:
            logger.debug("applying delay")
            delay = datetime.timedelta(0, grb_input_delay[key] * 60 * 60)
            new_bdy_times = [b - delay for b in bdy_times]
        else:
            logger.debug("no delay applied")
            new_bdy_times = bdy_times

        fmt = grb_input_fmt[key]
        #
        # Generate filelist based on the initial time, and the forecast hour
        #
        filelist = list(
            OrderedDict.fromkeys(shared.get_bdy_filenames(fmt, new_bdy_times)))

        logger.debug(
            'running link_grib.csh script to link grib files to GRIBFILE.AAA etc'
        )

        os.chdir(wps_run_dir)
        args = ' '.join(filelist)
        cmd = '%s/link_grib.csh %s' % (wps_run_dir, args)
        shared.run_cmd(cmd, config)

        vtab_path = vtable[key]
        prefix = key
        namelist.update('prefix', key)
        namelist.to_file(namelist_wps)
        link_namelist_wps(config)
        vtab_wps = wps_run_dir + '/Vtable'

        if os.path.exists(vtab_wps):
            os.remove(vtab_wps)
        cmd = 'ln -sf %s %s' % (vtab_path, vtab_wps)
        logger.debug(cmd)
        subprocess.call(cmd, shell=True)
        #logger.debug("changing directory to %s" % wps_run_dir)
        #os.chdir(wps_run_dir)
        cmd = '%s/ungrib.exe' % wps_run_dir

        logger.debug(cmd)
        shared.run(cmd, config, wps_run_dir)

        cmd = 'grep "Successful completion" %s/ungrib.log*' % wps_run_dir  # check for success
        ret = shared.run_cmd(cmd, config)
        if ret != 0:
            raise IOError('ungrib.exe did not complete')

    logger.info('*** SUCCESS UNGRIB ***\n')
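A hedged sketch of a two-source configuration for the function above, exercising the dictionary handling in the code; the GRIB filename patterns, delay and paths are placeholders, and the exact pattern syntax expected by `shared.get_bdy_filenames` is an assumption:

import datetime

config = {
    'wps_dir':         '/path/to/WPS',
    'wps_run_dir':     '/path/to/wps/run',
    'namelist_wps':    '/path/to/namelist.wps',
    'working_dir':     '/path/to/working_dir',
    'met_em_dir':      '/path/to/met_em',
    'init_time':       datetime.datetime(2013, 4, 24, 0),
    'bdy_conditions':  'GFS',
    # one entry per source; the keys become the ungrib prefixes
    'vtable':          {'GFS': '/path/to/Vtable.GFS', 'SST': '/path/to/Vtable.SST'},
    'grb_input_fmt':   {'GFS': '/path/to/grib/gfs.%Y%m%d%H',   # pattern syntax assumed
                        'SST': '/path/to/grib/sst.%Y%m%d'},
    'grb_input_delay': {'SST': 24},   # hours subtracted from the boundary times for this source
}
run_ungrib(config)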
Example #9
def ungrib_sst(config):
    """ Runs ungrib.exe for SST fields, makes and modifies a copy of namelist.wps,
    then restores the original namelist.wps"""
    logger = shared.get_logger()

    wps_dir = config['wps_dir']
    wps_run_dir = config['wps_run_dir']
    tmp_dir = config['tmp_dir']
    working_dir = config['working_dir']
    init_time = config['init_time']
    max_dom = config['max_dom']
    sst_local_dir = config['sst_local_dir']
    sst_time = shared.get_sst_time(config)
    sst_filename = shared.get_sst_filename(config)
    vtable_sst = config['sst_vtable']
    vtable = wps_run_dir + '/Vtable'
    queue = config['queue']
    log_file = '%s/ungrib.sst.log' % wps_run_dir
    namelist_wps = config['namelist_wps']
    namelist_sst = '%s/namelist.sst' % working_dir

    namelist = shared.read_namelist(namelist_wps)

    #
    # update one line to point to the new SST field
    # ungrib.exe will name SST field as e.g.
    # SST:2013-04-24_00
    #
    constants_name = '%s/SST:%s' % (wps_run_dir,
                                    sst_time.strftime('%Y-%m-%d_%H'))
    logger.debug('Updating constants_name ----> %s' % constants_name)
    namelist.update('constants_name', constants_name, section='metgrid')

    # Write the changes into the original
    namelist.to_file(namelist_wps)

    #
    # Update start and end time to process SST
    #
    start_str = sst_time.strftime("%Y-%m-%d_%H:%M:%S")
    end_str = sst_time.strftime("%Y-%m-%d_%H:%M:%S")
    logger.debug("Updating namelist.sst")
    logger.debug('PREFIX ------> SST')
    logger.debug('start_date---> ' + start_str)
    logger.debug('end_date-----> ' + end_str)

    namelist.update('prefix', 'SST')
    namelist.update('start_date', [start_str] * max_dom)
    namelist.update('end_date', [end_str] * max_dom)
    logger.debug('writing modified namelist.sst to file -------> %s' %
                 namelist_sst)
    namelist.to_file(namelist_sst)

    #remove any linked namelist.wps
    logger.debug('removing namelist.wps')
    namelist_run = '%s/namelist.wps' % wps_run_dir
    if os.path.exists(namelist_run):
        os.remove(namelist_run)

    # link namelist.sst to namelist.wps in WPS run dir
    logger.debug('linking namelist.sst -----> namelist.wps')
    cmd = 'ln -sf %s %s' % (namelist_sst, namelist_run)
    shared.run_cmd(cmd, config)

    logger.debug('removing Vtable')
    if os.path.exists(vtable):
        os.remove(vtable)
    logger.debug('linking Vtable.SST ----> Vtable')
    cmd = 'ln -sf %s %s' % (vtable_sst, vtable)
    shared.run_cmd(cmd, config)

    # run link_grib to link SST grib files
    logger.debug('Linking SST GRIB files')
    cmd = '%s/link_grib.csh %s/%s' % (wps_dir, sst_local_dir, sst_filename)
    shared.run_cmd(cmd, config)

    logger.info('\n*** RUNNING UNGRIB FOR SST ***')
    cmd = '%s/ungrib.exe' % wps_run_dir
    shared.run_cmd(cmd, config)

    cmd = 'grep "Successful completion" ./ungrib.log*'  # check for success
    ret = shared.run_cmd(cmd, config)
    if ret != 0:
        raise IOError('Ungrib failed for SST')

    logger.info('*** SUCCESS UNGRIB SST ***\n')
    logger.debug('Removing namelist.wps')
    if os.path.exists(namelist_run):
        os.remove(namelist_run)
    # link in original (unmodified) namelist.wps
    cmd = 'ln -sf %s %s' % (namelist_wps, namelist_run)
    shared.run_cmd(cmd, config)
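A hedged sketch of the configuration read by the function above; paths are placeholders, and `shared.get_sst_time` and `shared.get_sst_filename` may rely on keys not listed here:

import datetime

config = {
    'wps_dir':       '/path/to/WPS',
    'wps_run_dir':   '/path/to/wps/run',
    'tmp_dir':       '/path/to/tmp',
    'working_dir':   '/path/to/working_dir',
    'init_time':     datetime.datetime(2013, 4, 24, 0),
    'max_dom':       2,
    'sst_local_dir': '/path/to/sst/grib',
    'sst_vtable':    '/path/to/Vtable.SST',
    'queue':         None,
    'namelist_wps':  '/path/to/namelist.wps',
}
ungrib_sst(config)   # writes namelist.sst, links it as namelist.wps, runs ungrib.exe for the SST field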
Example #10
def update_namelist_input(config):    
    """ Updates the namelist.input file to reflect updated settings in config.
    Adds a non-standard &metadata section to give a name to the model run
    
    Arguments:
    config -- dictionary containing various configuration options
        
    """        
    logger = shared.get_logger()
    logger.debug('*** UPDATING namelist.input ***')
    

    #wrf_dir       = config['wrf_dir']
    working_dir   = config['working_dir']
    model         = config['model']
    model_run     = config['model_run']
    
    domain        = config['domain']
        
    namelist_run  = '%s/namelist.input'  % working_dir
    namelist_input = config['namelist_input']
    namelist_wps   = config['namelist_wps']
    shutil.copyfile(namelist_input, namelist_input+'.backup')
    
    # read settings from domain-based namelist
    namelist        = shared.read_namelist(namelist_input)   

    # read settings from domain-based namelist.wps
    #namelist_wps   = shared.read_namelist(namelist_wps)   
    #wps_settings   = namelist_wps.settings


    #
    # Add new metadata section to namelist.input
    #
    logger.debug('Adding metadata to the namelist.input')
    logger.debug('domain = %s'    % domain)
    logger.debug('model = %s'     % model)
    logger.debug('model_run = %s' % model_run)
    namelist.update('domain', domain, 'metadata')
    namelist.update('model', model, 'metadata')
    namelist.update('model_run', model_run, 'metadata')

    
    #
    # Overrule max_dom with the one in config
    #
    max_dom = config['max_dom']
    namelist.update('max_dom', max_dom)
    #logger.debug("Syncing dx and dy between namelist.wps and namelist.input")
    #dx = wps_settings['dx']
    #dy = wps_settings['dy']
    #logger.debug("namleist.wps: dx: %s ------> namelist.input" % dx)
    #logger.debug("namleist.wps: dy: %s ------> namelist.input" % dy)
    #namelist.update('dx', wps_settings['dx'])
    #namelist.update('dy', wps_settings['dy'])    
    fcst_hours       = config['fcst_hours']
    fcst_times       = shared.get_fcst_times(config)
    history_interval = config['history_interval']   
    bdy_interval     = config['bdy_interval']  # this is in hours         
    
    interval_seconds = 60*60*bdy_interval
    
    start   = fcst_times[0]
    end     = fcst_times[-1]
    diff    = end - start
    
    
    #
    # I'm still not sure how the WRF namelist works between 
    # the start and end settings and the run_XX settings.
    # I think we can just keep days as zero, and work entirely
    # in hours
    #
    namelist.update('start_year', [start.year] * max_dom)
    namelist.update('start_month',[start.month]* max_dom)
    namelist.update('start_day',  [start.day]  * max_dom)
    namelist.update('start_hour', [start.hour] * max_dom)
    namelist.update('end_year',   [end.year]   * max_dom)
    namelist.update('end_month',  [end.month]  * max_dom)
    namelist.update('end_day',    [end.day]    * max_dom)
    namelist.update('end_hour',   [end.hour]   * max_dom)
    namelist.update('run_days',   [0])   
    namelist.update('run_hours',  [fcst_hours] )   
    #namelist.update('run_minutes',[0])   
    namelist.update('history_interval', [history_interval] * max_dom)
    namelist.update('interval_seconds', [interval_seconds])


    #
    # If DFI is being used, update DFI settings
    # From user guide:
    # "For time specification, it typically needs to integrate 
    # backward for 0.5 to 1 hour, and integrate forward for half of the time."
    #
    # should we just write this every time into the file and rely on dfi_opt
    # as the on/off switch?
    #
    hour         = datetime.timedelta(0, 60*60)
    minute       = datetime.timedelta(0, 60)
    #dfi_bck      = config['dfi_bck'] * minute
    #dfi_fwd      = config['dfi_fwd'] * minute
    #dfi_bckstop  = start - dfi_bck
    #dfi_fwdstop  = start + dfi_fwd
    

    #namelist.update('dfi_bckstop_year',   dfi_bckstop.year,   'dfi_control')
    #namelist.update('dfi_bckstop_month',  dfi_bckstop.month,  'dfi_control')
    #namelist.update('dfi_bckstop_day',    dfi_bckstop.day,    'dfi_control')
    #namelist.update('dfi_bckstop_hour',   dfi_bckstop.hour,   'dfi_control')
    #namelist.update('dfi_bckstop_minute', dfi_bckstop.minute, 'dfi_control')
    #namelist.update('dfi_bckstop_second', dfi_bckstop.second, 'dfi_control')
    #namelist.update('dfi_fwdstop_year',   dfi_fwdstop.year,   'dfi_control')
    #namelist.update('dfi_fwdstop_month',  dfi_fwdstop.month,  'dfi_control')
    #namelist.update('dfi_fwdstop_day',    dfi_fwdstop.day,    'dfi_control')
    #namelist.update('dfi_fwdstop_hour',   dfi_fwdstop.hour,   'dfi_control')
    #namelist.update('dfi_fwdstop_minute', dfi_fwdstop.minute, 'dfi_control')
    #namelist.update('dfi_fwdstop_second', dfi_fwdstop.second, 'dfi_control')
   
    logger.debug('writing new settings to file')
    namelist.to_file(namelist_input)
    
    #logger.debug(namelist)
    logger.debug('*** FINISHED UPDATING namelist.input ***')  
Example #11
def run_ungrib(config):
    """ Runs ungrib.exe and checks output was sucessfull
    If vtable and gbr_input_fmt are NOT dictionaries, 
    then dictionarius will be constructed from them using 
    the key bdy_conditions from the metadata
    
    Arguments:
    config -- dictionary specifying configuration options
    
    """
    logger        = shared.get_logger()
    wps_dir       = config['wps_dir']
    wps_run_dir   = config['wps_run_dir']
    namelist_wps  = config['namelist_wps']
    working_dir   = config['working_dir']    
    met_em_dir    = config['met_em_dir']
    init_time     = config['init_time']
    log_file      = '%s/ungrib.log' % wps_run_dir
    vtable        = config['vtable']
    grb_input_fmt  = config['grb_input_fmt']
    grb_input_delay = config.get("grb_input_delay")  # this allows None to be returned 
    
    bdy_conditions = config['bdy_conditions']

    logger.info("\n*** RUNNING UNGRIB ***")
    
    namelist = shared.read_namelist(namelist_wps)
    
    bdy_times     = shared.get_bdy_times(config)
    

    if type(grb_input_fmt)!=type({}):
        grb_input_fmt = {bdy_conditions:grb_input_fmt}

    if type(vtable)!=type({}):
        vtable = {bdy_conditions:vtable}


    #
    # Check that boundary conditions exist
    #     
    for key in vtable.keys():
        
        
        if grb_input_delay and key in grb_input_delay:
            logger.debug("applying delay")
            delay = datetime.timedelta(0, grb_input_delay[key]*60*60)
            new_bdy_times = [b - delay for b in bdy_times]
        else:
            logger.debug("no delay applied")
            new_bdy_times = bdy_times
        
        fmt = grb_input_fmt[key]
        #
        # Generate filelist based on the initial time, and the forecast hour
        #        
        filelist = list(OrderedDict.fromkeys(shared.get_bdy_filenames(fmt, new_bdy_times)))

        #
        # Check the boundary files exist
        #
        logger.debug('checking boundary condition files exist')
        for f in filelist:
            if not os.path.exists(f):
                raise IOError('cannot find file: %s' % f)

    logger.debug('all boundary conditions files exist')
    
    #
    # Now process boundary conditions
    #
    for key in vtable.keys():

        if grb_input_delay and key in grb_input_delay:
            logger.debug("applying delay")
            delay = datetime.timedelta(0, grb_input_delay[key]*60*60)
            new_bdy_times = [b - delay for b in bdy_times]
        else:
            logger.debug("no delay applied")
            new_bdy_times = bdy_times
        
        fmt = grb_input_fmt[key]
        #
        # Generate filelist based on the initial time, and the forecast hour
        #        
        filelist = list(OrderedDict.fromkeys(shared.get_bdy_filenames(fmt, new_bdy_times)))

        
        logger.debug('running link_grib.csh script to link grib files to GRIBFILE.AAA etc')
        
        os.chdir(wps_run_dir)
        args = ' '.join(filelist)
        cmd = '%s/link_grib.csh %s' %(wps_run_dir,args)
        shared.run_cmd(cmd, config)
  
        vtab_path = vtable[key]
        prefix = key
        namelist.update('prefix', key)
        namelist.to_file(namelist_wps)
        link_namelist_wps(config)
        vtab_wps  = wps_run_dir+'/Vtable'

        if os.path.exists(vtab_wps):
            os.remove(vtab_wps)
        cmd = 'ln -sf %s %s' %(vtab_path, vtab_wps)
        logger.debug(cmd)
        subprocess.call(cmd, shell=True)    
        #logger.debug("changing directory to %s" % wps_run_dir)
        #os.chdir(wps_run_dir)
        cmd     =  '%s/ungrib.exe' % wps_run_dir
        
        logger.debug(cmd)
        shared.run(cmd, config, wps_run_dir)

        cmd = 'grep "Successful completion" %s/ungrib.log*' % wps_run_dir # check for success
        ret = shared.run_cmd(cmd, config)
        if ret != 0:
            raise IOError('ungrib.exe did not complete')
    
    logger.info('*** SUCCESS UNGRIB ***\n')
Example #12
def ungrib_sst(config):
    """ Runs ungrib.exe for SST fields, makes and modifies a copy of namelist.wps,
    then restores the original namelist.wps"""
    logger = shared.get_logger()
    
    wps_dir      = config['wps_dir']
    wps_run_dir  = config['wps_run_dir']
    tmp_dir      = config['tmp_dir']
    working_dir  = config['working_dir']
    init_time    = config['init_time']
    max_dom      = config['max_dom']
    sst_local_dir = config['sst_local_dir']
    sst_time     = shared.get_sst_time(config)
    sst_filename = shared.get_sst_filename(config)
    vtable_sst   = config['sst_vtable']
    vtable       = wps_run_dir+'/Vtable'
    queue        = config['queue']
    log_file     = '%s/ungrib.sst.log' % wps_run_dir
    namelist_wps  = config['namelist_wps']
    namelist_sst  = '%s/namelist.sst' % working_dir

    namelist      = shared.read_namelist(namelist_wps)

    #
    # update one line to point to the new SST field
    # ungrib.exe will name SST field as e.g.
    # SST:2013-04-24_00
    #
    constants_name = '%s/SST:%s' %(wps_run_dir, sst_time.strftime('%Y-%m-%d_%H'))
    logger.debug('Updating constants_name ----> %s' % constants_name)
    namelist.update('constants_name', constants_name, section='metgrid')

    # Write the changes into the original
    namelist.to_file(namelist_wps)

    #
    # Update start and end time to process SST
    #
    start_str  = sst_time.strftime("%Y-%m-%d_%H:%M:%S")
    end_str    = sst_time.strftime("%Y-%m-%d_%H:%M:%S")
    logger.debug("Updating namelist.sst")
    logger.debug('PREFIX ------> SST')
    logger.debug('start_date---> ' + start_str)
    logger.debug('end_date-----> ' + end_str)

    namelist.update('prefix', 'SST')
    namelist.update('start_date', [start_str]*max_dom)
    namelist.update('end_date',   [end_str]*max_dom)
    logger.debug('writing modified namelist.sst to file -------> %s' % namelist_sst)
    namelist.to_file(namelist_sst)

    #remove any linked namelist.wps 
    logger.debug('removing namelist.wps')
    namelist_run = '%s/namelist.wps' % wps_run_dir
    if os.path.exists(namelist_run):
        os.remove(namelist_run)

    # link namelist.sst to namelist.wps in WPS run dir
    logger.debug('linking namelist.sst -----> namelist.wps')
    cmd = 'ln -sf %s %s' %(namelist_sst, namelist_run)
    shared.run_cmd(cmd, config)

    logger.debug('removing Vtable')
    if os.path.exists(vtable):
        os.remove(vtable)
    logger.debug('linking Vtable.SST ----> Vtable')
    cmd = 'ln -sf %s %s' %(vtable_sst, vtable)
    shared.run_cmd(cmd, config)

    # run link_grib to link SST grib files
    logger.debug('Linking SST GRIB files')
    cmd = '%s/link_grib.csh %s/%s' %(wps_dir, sst_local_dir, sst_filename)
    shared.run_cmd(cmd, config)


    logger.info('\n*** RUNNING UNGRIB FOR SST ***')
    cmd     =  '%s/ungrib.exe' % wps_run_dir
    shared.run_cmd(cmd, config)

    cmd = 'grep "Successful completion" ./ungrib.log*' # check for success
    ret = shared.run_cmd(cmd, config)
    if ret!=0:
        raise IOError('Ungrib failed for SST')
    
    logger.info('*** SUCCESS UNGRIB SST ***\n')
    logger.debug('Removing namelist.wps')
    if os.path.exists(namelist_run): 
        os.remove(namelist_run)
    # link in original (unmodified) namelist.wps
    cmd = 'ln -sf %s %s' %(namelist_wps, namelist_run)    
    shared.run_cmd(cmd, config)