Example #1
def link_namelist_wps(config):
    """Links namelist_wps into wps_run_dir"""

    logger = shared.get_logger()
    #
    # link namelist.wps file from domain dir to
    # wps dir. First check that it exists
    #
    namelist_wps = config['namelist_wps']
    namelist_run = '%s/namelist.wps' % config['wps_run_dir']

    #
    # Check if namelist exists in working_dir directory
    #
    if not os.path.exists(namelist_wps):
        raise IOError('could not find namelist.wps file: %s' % namelist_wps)

    if os.path.exists(namelist_run):
        logger.debug('removing existing namelist.wps file: %s' % namelist_run)
        os.remove(namelist_run)

    #
    # Execute this command even in a dummy run, so that namelist file is linked
    # correctly and can be updated by other commands
    #
    cmd = 'ln -sf %s %s ' % (namelist_wps, namelist_run)
    logger.debug(cmd)
    subprocess.call(cmd, shell=True)
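A note on the final step: since the function only shells out to ln -sf, an equivalent pure-Python sketch (not part of the original module) could use os.symlink instead:

import os

def link_namelist_wps_py(config):
    """Illustrative sketch only: same effect as the ln -sf call above,
    assuming the same config keys as the original function."""
    namelist_wps = config['namelist_wps']
    namelist_run = os.path.join(config['wps_run_dir'], 'namelist.wps')

    if not os.path.exists(namelist_wps):
        raise IOError('could not find namelist.wps file: %s' % namelist_wps)

    # ln -sf behaviour: remove any existing link or file before re-linking
    if os.path.lexists(namelist_run):
        os.remove(namelist_run)
    os.symlink(namelist_wps, namelist_run)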
Example #2
def add_metadata(config):
    """ Adds metadata tags into the wrfout files. Expects there to be one 
    wrfout file per init_time. If there are more, they will not have metadata added."""

    logger = shared.get_logger()
    logger.info("*** Adding metadata to wrfout files ***")

    wrfout_dir = config['wrfout_dir']
    init_time = config['init_time']
    max_dom = config['max_dom']

    metadata = config['metadata']
    logger.debug(metadata)
    wrfout_files = [
        '%s/wrfout_d%02d_%s' %
        (wrfout_dir, d, init_time.strftime('%Y-%m-%d_%H:%M:%S'))
        for d in range(1, max_dom + 1)
    ]

    for f in wrfout_files:
        logger.debug("compressing %s" % f)
        if not os.path.exists(f):
            raise MissingFile("could not find %s" % f)

        # create attribute description for ncatted
        # note that we make the attribute names uppercase for consistency with WRF output
        att_defs = ' '.join([
            '-a %s,global,c,c,"%s"' % (s.upper(), config[s]) for s in metadata
        ])
        logger.debug(att_defs)
        cmd = 'ncatted -O -h %s %s' % (att_defs, f)
        logger.debug(cmd)
        shared.run_cmd(cmd, config)
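For illustration, with a hypothetical metadata list and config, the attribute definitions and the resulting ncatted call come out as follows (in ncatted, -a NAME,global,c,c,"value" creates a global character attribute, -O overwrites in place and -h suppresses the history entry; all values below are made up):

# hypothetical values, for illustration only
metadata = ['project', 'institution']
config = {'project': 'demo-run', 'institution': 'ACME'}
f = 'wrfout_d01_2016-01-01_00:00:00'

att_defs = ' '.join([
    '-a %s,global,c,c,"%s"' % (s.upper(), config[s]) for s in metadata
])
cmd = 'ncatted -O -h %s %s' % (att_defs, f)
print(cmd)
# ncatted -O -h -a PROJECT,global,c,c,"demo-run" -a INSTITUTION,global,c,c,"ACME" wrfout_d01_2016-01-01_00:00:00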
Example #3
def prepare(config):
    """Removes files specified in pre_clean. Creates subdirectories specfied in create_dirs,
    links files specified in wrf_links into wrf_working_dir."""

   
    working_dir    = config['working_dir']
    
    links          = config['prepare.link']
    pre_clean      = config['prepare.remove'] 
    subdirs        = config['prepare.create']

    logger         = shared.get_logger()
    logger.info('\n*** PREPARING ***')

    if not os.path.exists(working_dir):
        os.makedirs(working_dir)
    
    #fulldirs = [ working_dir+'/'+d for d in subdirs ]
    fulldirs  = subdirs 
    for d in fulldirs:
        if not os.path.exists(d):
            logger.debug('creating directory %s ' %d)
            os.mkdir(d) 
   
    for pattern in pre_clean:
        flist = glob.glob(pattern)
        for f in flist:
            if os.path.exists(f):
                logger.debug('removing file: %s' % f )
                os.remove(f)


    for pattern in links:
        shared.link(pattern)
    logger.info('*** DONE PREPARE ***\n')
Example #4
def hyperslab(config):

    logger = shared.get_logger()
    logger.info("*** Hyperslabbing wrfout files ***")
    wrfout_dir = config['wrfout_dir']
    init_time = config['init_time']
    max_dom = config['max_dom']
    dimspec = config['post.hyperslab.dimspec']

    wrfout_files = [
        '%s/wrfout_d%02d_%s' %
        (wrfout_dir, d, init_time.strftime('%Y-%m-%d_%H:%M:%S'))
        for d in range(1, max_dom + 1)
    ]
    for f in wrfout_files:
        if not os.path.exists(f):
            raise MissingFile("could not find %s" % f)
        tmp_name = f + '.tmp'
        logger.debug("compressing %s to temporary file: %s" % (f, tmp_name))
        cmd = 'ncks -4 -O %s %s %s' % (dimspec, f, tmp_name)
        shared.run(cmd, config)
        if not os.path.exists(tmp_name):
            raise IOError("compression failed for %s" % f)

        os.remove('%s' % f)
        os.rename(f + '.tmp', f)

    logger.info("*** Done hyperslabbing wrfout files ***")
Example #5
def run_wrf(config):
    """ Run wrf.exe and check output was sucessful
    
    Arguments:
    config -- dictionary containing various configuration options
    
    """
    logger = shared.get_logger()
    logger.info('\n*** RUNNING WRF ***')
    queue = config['queue']
    wrf_run_dir = config['wrf_run_dir']
    log_file = '%s/wrf.log' % wrf_run_dir

    executable = '%s/wrf.exe' % wrf_run_dir
    shared.run(executable, config, from_dir=wrf_run_dir)

    #
    # Check for success
    #
    cmd = 'grep "SUCCESS COMPLETE" %s/rsl.error.0000' % wrf_run_dir
    ret = shared.run_cmd(cmd, config)
    if ret != 0:
        raise IOError('wrf.exe did not complete')

    logger.info('*** SUCCESS WRF ***\n')
Example #6
def transfer_to_web_dir(config):
    """ Transfers all plots in output folder to web folder"""

    logger = shared.get_logger()
    logger.debug('Transferring plot files to web dir')
    init_time = config['init_time']
    full_trace = config['full_trace']
    ncl_out_dir = shared.sub_date(config['ncl_out_dir'], init_time=init_time)
    ncl_web_dir = shared.sub_date(config['ncl_web_dir'], init_time=init_time)

    if not os.path.exists(ncl_web_dir):
        os.makedirs(ncl_web_dir)

    flist = glob.glob(ncl_out_dir + '/*')
    shared.transfer(flist, ncl_web_dir, mode='copy', debug_level='NONE')

    ncl_out_dir = shared.sub_date(config['ncl_ol_out_dir'],
                                  init_time=init_time)
    ncl_web_dir = shared.sub_date(config['ncl_ol_web_dir'],
                                  init_time=init_time)

    if not os.path.exists(ncl_web_dir):
        os.makedirs(ncl_web_dir)

    flist = glob.glob(ncl_out_dir + '/*')
    shared.transfer(flist, ncl_web_dir, mode='copy', debug_level='NONE')
Example #7
    def __init__(self, logger=None):
        """
        Constructor to initialize an instance of type JsonLoaderService
        :param logger: the logger to be used
        """
        self._logger = logger or get_logger('TEST')
        self.dao = JsonDao(logger)
Example #8
def run_wrf(config):
    """ Run wrf.exe and check output was sucessful
    
    Arguments:
    config -- dictionary containing various configuration options
    
    """
    logger          = shared.get_logger()
    logger.info('\n*** RUNNING WRF ***')
    queue         = config['queue']
    wrf_run_dir   = config['wrf_run_dir']
    log_file      = '%s/wrf.log' % wrf_run_dir
    
    executable  = '%s/wrf.exe' % wrf_run_dir
    shared.run(executable, config, from_dir=wrf_run_dir)
    

    #
    # Check for success
    #    
    cmd = 'grep "SUCCESS COMPLETE" %s/rsl.error.0000' % wrf_run_dir
    ret = shared.run_cmd(cmd, config)
    if ret!=0:
        raise IOError('wrf.exe did not complete')
    
    logger.info('*** SUCCESS WRF ***\n')
Example #9
def hyperslab(config):
    
    logger=shared.get_logger()
    logger.info("*** Hyperslabbing wrfout files ***")
    wrfout_dir = config['wrfout_dir']
    init_time  = config['init_time']
    max_dom    = config['max_dom']
    dimspec    = config['post.hyperslab.dimspec']
    
    
    wrfout_files = ['%s/wrfout_d%02d_%s' %(wrfout_dir, d, init_time.strftime('%Y-%m-%d_%H:%M:%S')) for d in range(1,max_dom+1)]
    for f in wrfout_files:
        if not os.path.exists(f):
            raise MissingFile("could not find %s" % f)
        tmp_name = f + '.tmp'
        logger.debug("compressing %s to temporary file: %s" % (f, tmp_name))
        cmd = 'ncks -4 -O %s %s %s' % (dimspec, f, tmp_name)
        shared.run(cmd, config)
        if not os.path.exists(tmp_name):
            raise IOError("compression failed for %s" % f)
        
        os.remove('%s' %f)
        os.rename(f+'.tmp', f) 
    
    logger.info("*** Done hyperslabbing wrfout files ***")       
Example #10
def timing(config):
    """Reads a rsl file from WRF and works out timing information
    from that """

    #
    # Where should we assume to find the rsl file?
    # In the wrf_working_dir
    #
    logger =shared.get_logger()
    logger.info('*** Computing timing information ***')
    wrf_run_dir    = config['wrf_run_dir']
    rsl_file       = '%s/rsl.error.0000' % wrf_run_dir
    namelist_input = config['namelist_input']
    namelist       = shared.read_namelist(namelist_input).settings
    timestep       = namelist['time_step'][0]
    f              = open(rsl_file, 'r')
    lines          = f.read().split('\n')
    
    # get timings on outer domain
    main_times  = [float(l.split()[8]) for l in lines if re.search("^Timing for main: time .* 1:", l)]
    total       = sum(main_times)
    steps       = len(main_times)
    time_per_step = (total/steps)
    x_real       = timestep / time_per_step
    logger.info('\n*** TIMING INFORMATION ***')
    logger.info('\t %d outer timesteps' % steps)
    logger.info('\t %0.3f elapsed seconds' % total)
    logger.info('\t %0.3f seconds per timestep' % time_per_step )
    logger.info('\t %0.3f times real time' %x_real)
    logger.info('*** END TIMING INFORMATION ***\n')    
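The split()[8] indexing above relies on the layout of WRF's per-step timing lines in rsl.error.0000; a minimal sketch with one such line (the exact format is assumed from typical WRF output) shows which field gets picked up:

import re

# an assumed example of a timing line from rsl.error.0000
line = "Timing for main: time 2016-01-01_00:01:00 on domain   1:    3.12345 elapsed seconds"

if re.search("^Timing for main: time .* 1:", line):
    elapsed = float(line.split()[8])   # the "3.12345" field
    print(elapsed)                     # 3.12345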
Example #11
def run_geogrid(config):
    """ Runs geogrid.exe and checks output was sucessful
    
    Arguments:
    config -- dictionary specifying configuration options
    
    """
    logger = shared.get_logger()
    logger.info("\n*** RUNNING GEOGRID ***")
    wps_run_dir    = config['wps_run_dir']
    os.chdir(wps_run_dir)

    queue          = config['queue']
    log_file       = '%s/geogrid.log' % wps_run_dir
    
    geogrid_wps = '%(wps_run_dir)s/GEOGRID.TBL' % config

    if not os.path.exists(geogrid_wps):
        raise IOError("Could not find GEOGRID.TBL at: %s " % geogrid_wps)
    

    cmd       =  '%s/geogrid.exe' % wps_run_dir
    
    shared.run(cmd, config, wps_run_dir)
    
    cmd = 'grep "Successful completion" %s/geogrid.log*' %(wps_run_dir)
    ret =shared.run_cmd(cmd, config)
    if ret!=0:
        raise IOError('geogrid.exe did not complete')

    logger.info('*** SUCCESS GEOGRID ***\n')
Example #12
def link_namelist_wps(config):
    """Links namelist_wps into wps_run_dir"""    
    
    logger = shared.get_logger()
    #
    # link namelist.wps file from domain dir to 
    # wps dir. First check that it exists
    #
    namelist_wps = config['namelist_wps']
    namelist_run = '%s/namelist.wps' % config['wps_run_dir']
    
    #
    # Check if namelist exists in working_dir directory
    #    
    if not os.path.exists(namelist_wps):
        raise IOError('could not find namelist.wps file: %s' % namelist_wps)
        
    if os.path.exists(namelist_run):
        logger.debug('removing existing namelist.wps file: %s' % namelist_run)
        os.remove(namelist_run)
    
    #
    # Execute this command even in a dummy run, so that namelist file is linked
    # correctly and can be updated by other commands
    #
    cmd = 'ln -sf %s %s ' %(namelist_wps, namelist_run)
    logger.debug(cmd)
    subprocess.call(cmd, shell=True)
Example #13
def run_geogrid(config):
    """ Runs geogrid.exe and checks output was sucessful
    
    Arguments:
    config -- dictionary specifying configuration options
    
    """
    logger = shared.get_logger()
    logger.info("\n*** RUNINING GEOGRID ***")
    wps_run_dir = config['wps_run_dir']
    os.chdir(wps_run_dir)

    queue = config['queue']
    log_file = '%s/geogrid.log' % wps_run_dir

    geogrid_wps = '%(wps_run_dir)s/GEOGRID.TBL' % config

    if not os.path.exists(geogrid_wps):
        raise IOError("Could not find GEOGRID.TBL at: %s " % geogrid_wps)

    cmd = '%s/geogrid.exe' % wps_run_dir

    shared.run(cmd, config, wps_run_dir)

    cmd = 'grep "Successful completion" %s/geogrid.log*' % (wps_run_dir)
    ret = shared.run_cmd(cmd, config)
    if ret != 0:
        raise IOError('geogrid.exe did not complete')

    logger.info('*** SUCCESS GEOGRID ***\n')
Example #14
def prepare_ndown(config):
    """Runs a one-way nested simulation using ndown.exe
    We assume the coarse resolution run has been done, 
    and we have wrfout_d01.date files.
    
    We only need to run metgrid for the initial forecast time.
    
    We have two options: either we force the user to do all the renaming themselves,
    or we allow them to utilise the original namelist.input file and effectively
    add a column onto that. This could be done via a bunch of smaller utility steps,
    e.g. shift_namelist namelist.input 3 > namelist.input,
    which would rotate the columns of a namelist.input file so that the n-th column
    becomes the first column.

    Therefore we have to run ungrib, geogrid and metgrid.
    Assume the geo_em files exist for both domains."""

    logger = shared.get_logger()
    logger.info('*** PREPARING NDOWN ***')
    namelist_wps = config['namelist_wps']
    namelist_input = config['namelist_input']
    max_dom = config['max_dom']
    wrf_run_dir = config['wrf_run_dir']

    if max_dom != 2:
        raise ConfigError("max_dom must equal 2 when doing ndown runs")

    bdy_times = shared.get_bdy_times(config)
    ndown_fmt = config['ndown_fmt']

    wrfout_d01_files = [
        shared.sub_date(ndown_fmt, init_time=bdy_times[0], valid_time=t)
        for t in bdy_times
    ]
    for f in wrfout_d01_files:
        if not os.path.exists(f):
            raise MissingFile("File: %s missing" % f)
        cmd = 'ln -sf %s %s' % (f, wrf_run_dir)
        shared.run_cmd(cmd, config)

    # Check for wrfinput_d02
    wrfinput_d02 = '%s/wrfinput_d02' % wrf_run_dir
    if not os.path.exists(wrfinput_d02):
        raise MissingFile("wrfinput_d02 is missing")

    os.rename('%s/wrfinput_d02' % wrf_run_dir, '%s/wrfndi_d02' % wrf_run_dir)

    namelist = read_namelist(namelist_input)

    # History interval is in minutes
    history_interval = namelist.settings['history_interval']
    interval_seconds = history_interval[0] * 60
    namelist.update('interval_seconds', interval_seconds)
    namelist.insert('io_form_auxinput2', 2, 'time_control')
    namelist.to_file(namelist_input)

    logger.info('*** DONE PREPARE NDOWN ***')
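The shift_namelist utility mentioned in the docstring is not shown in these examples; a rough sketch of the column rotation it describes (per-domain value lists rotated so the n-th column becomes the first) might look like this hypothetical helper:

def shift_columns(values, n):
    """Rotate a per-domain namelist value list so the n-th column
    (1-based) becomes the first. Hypothetical helper, for illustration only."""
    return values[n - 1:] + values[:n - 1]

# e.g. history_interval for three domains, making domain 3's value lead
print(shift_columns([180, 60, 20], 3))   # [20, 180, 60]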
Example #15
def prepare_ndown(config):
    """Runs a one-way nested simulation using ndown.exe
    We assume the coarse resolution run has been done, 
    and we have wrfout_d01.date files.
    
    We only need to run metgrid for the initial forecast time.
    
    We have two options: either we force the user to do all the renaming themselves,
    or we allow them to utilise the original namelist.input file and effectively
    add a column onto that. This could be done via a bunch of smaller utility steps,
    e.g. shift_namelist namelist.input 3 > namelist.input,
    which would rotate the columns of a namelist.input file so that the n-th column
    becomes the first column.

    Therefore we have to run ungrib, geogrid and metgrid.
    Assume the geo_em files exist for both domains."""

    logger =shared.get_logger()
    logger.info('*** PREPARING NDOWN ***')
    namelist_wps   = config['namelist_wps']
    namelist_input = config['namelist_input']
    max_dom        = config['max_dom']
    wrf_run_dir    = config['wrf_run_dir']
    
    
    if max_dom!=2:
        raise ConfigError("max_dom must equal 2 when doing ndown runs")
    
    bdy_times = shared.get_bdy_times(config)
    ndown_fmt = config['ndown_fmt']
    
    wrfout_d01_files = [shared.sub_date(ndown_fmt, init_time=bdy_times[0], valid_time=t) for t in bdy_times]
    for f in wrfout_d01_files:
        if not os.path.exists(f):
            raise MissingFile("File: %s missing" % f)
        cmd = 'ln -sf %s %s' % (f, wrf_run_dir)
        shared.run_cmd(cmd, config)
    
    
    # Check for wrfinput_d02
    wrfinput_d02 = '%s/wrfinput_d02' % wrf_run_dir
    if not os.path.exists(wrfinput_d02):
        raise MissingFile("wrfinput_d02 is missing")
    
    os.rename('%s/wrfinput_d02' % wrf_run_dir, '%s/wrfndi_d02' % wrf_run_dir)
    
    
    namelist         = read_namelist(namelist_input)
    
    # History interval is in minutes
    history_interval = namelist.settings['history_interval']
    interval_seconds = history_interval[0] * 60
    namelist.update('interval_seconds', interval_seconds)
    namelist.insert('io_form_auxinput2', 2, 'time_control')
    namelist.to_file(namelist_input)
    
    logger.info('*** DONE PREPARE NDOWN ***')
Example #16
def timing(config):
    """Reads a rsl file from WRF and works out timing information
    from that """

    #
    # Where should we assume to find the rsl file?
    # In the wrf_working_dir
    #
    logger = shared.get_logger()
    logger.info('*** Computing timing information ***')
    wrf_run_dir = config['wrf_run_dir']
    rsl_file = '%s/rsl.error.0000' % wrf_run_dir
    namelist_input = config['namelist_input']
    namelist = shared.read_namelist(namelist_input).settings
    timestep = namelist['time_step'][0]
    f = open(rsl_file, 'r')
    lines = f.read().split('\n')

    # get timings on outer domain
    main_times = [
        float(l.split()[8]) for l in lines
        if re.search("^Timing for main: time .* 1:", l)
    ]
    total = sum(main_times)
    steps = len(main_times)
    time_per_step = (total / steps)
    x_real = timestep / time_per_step
    logger.info('\n*** TIMING INFORMATION ***')
    logger.info('\t %d outer timesteps' % steps)
    logger.info('\t %0.3f elapsed seconds' % total)
    logger.info('\t %0.3f seconds per timestep' % time_per_step)
    logger.info('\t %0.3f times real time' % x_real)
    logger.info('*** END TIMING INFORMATION ***\n')
Example #17
def add_metadata(config):
    """ Adds metadata tags into the wrfout files. Expects there to be one 
    wrfout file per init_time. If there are more, they will not have metadata added."""

    logger = shared.get_logger()
    logger.info("*** Adding metadata to wrfout files ***")


    wrfout_dir = config['wrfout_dir']
    init_time  = config['init_time']
    max_dom    = config['max_dom']
    
    metadata = config['metadata']
    logger.debug(metadata)
    wrfout_files = ['%s/wrfout_d%02d_%s' %(wrfout_dir, d, init_time.strftime('%Y-%m-%d_%H:%M:%S')) for d in range(1,max_dom+1)]
    
    for f in wrfout_files:
        logger.debug("compressing %s" % f)
        if not os.path.exists(f):
            raise MissingFile("could not find %s" % f)
        
        # create attribute description for ncatted 
        # note that we make the attribute names uppercase for consistency with WRF output
        att_defs = ' '.join(['-a %s,global,c,c,"%s"' %(s.upper(), config[s]) for s in metadata])
        logger.debug(att_defs)
        cmd = 'ncatted -O -h %s %s' % (att_defs, f)
        logger.debug(cmd)
        shared.run_cmd(cmd, config)
Example #18
def compress(config):
    """Compresses netcdf files to netcdf4 format. Relies on 
    the NCO operator nccopy.  Will try and compress all output netcdf files
    associated with the current initial time, based on the standard WRF naming 
    convention.  If a simulation produces multiple wrfout files for an
    initial time (i.e. one file per day for three days), then only the first file
    will be compressed under the current configuration.
    
    nccopy does not support the -O overwrite flag, so we need to manually rename the files,
    and remove the originals on sucess"""
    
    logger=shared.get_logger()
    logger.info("*** Compressing wrfout files ***")
    wrfout_dir = config['wrfout_dir']
    init_time  = config['init_time']
    max_dom    = config['max_dom']
    comp_level = config['compression_level']
    
    
    wrfout_files = ['%s/wrfout_d%02d_%s' %(wrfout_dir, d, init_time.strftime('%Y-%m-%d_%H:%M:%S')) for d in range(1,max_dom+1)]
    for f in wrfout_files:
        if not os.path.exists(f):
            raise MissingFile("could not find %s" % f)
        tmp_name = f + '.tmp'
        logger.debug("compressing %s to temporary file: %s" % (f, tmp_name))
        cmd = 'nccopy -k4 -d %s %s %s' %(comp_level, f, tmp_name)
        shared.run(cmd, config)
        if not os.path.exists(tmp_name):
            raise IOError("compression failed for %s" % f)
        
        os.remove('%s' %f)
        os.rename(f+'.tmp', f) 
    
    logger.info("*** Done compressing wrfout files ***")        
Example #19
def update_namelist_wps(config):
    """ Updates the namelist.wps to reflect updated settings in config
    
    Arguments:
    config -- dictionary containing various configuration options
        
    """
    logger = shared.get_logger()
    logger.debug('*** UPDATING namelist.wps ***')

    #domain_dir = config['domain_dir']
    #model_run  = config['model_run']
    #wps_dir    = config['wps_dir']
    wps_run_dir = config['wps_run_dir']  # required for opt_geogrid_tbl_path
    #bdy_conditions = config['bdy_conditions']

    namelist_wps = config['namelist_wps']
    shutil.copyfile(namelist_wps, namelist_wps + '.backup')

    bdy_times = shared.get_bdy_times(config)

    max_dom = config['max_dom']
    init_time = config['init_time']

    met_em_dir = shared.sub_date(config['met_em_dir'], init_time=init_time)
    geo_em_dir = config['geo_em_dir']

    bdy_interval = config['bdy_interval']
    interval_seconds = bdy_interval * 60 * 60

    logger.debug('reading namelist.wps <--------- %s' % namelist_wps)
    namelist = shared.read_namelist(namelist_wps)

    #
    # Update some options based on the forecast config file
    #
    namelist.update('max_dom', max_dom)
    namelist.update('opt_output_from_geogrid_path',
                    geo_em_dir,
                    section='share')
    namelist.update('opt_geogrid_tbl_path', wps_run_dir, section='geogrid')
    namelist.update('opt_metgrid_tbl_path', wps_run_dir, section='metgrid')
    namelist.update('interval_seconds', [interval_seconds])

    #
    # Generate formatted strings for inclusion in the namelist.wps file
    #
    start_str = bdy_times[0].strftime("%Y-%m-%d_%H:%M:%S")
    end_str = bdy_times[-1].strftime("%Y-%m-%d_%H:%M:%S")
    logger.debug("Updating namelist.wps start and end")
    logger.debug(start_str)
    logger.debug(end_str)

    namelist.update('start_date', [start_str] * max_dom)
    namelist.update('end_date', [end_str] * max_dom)

    logger.debug('writing modified namelist.wps to file')
    namelist.to_file(namelist_wps)
    logger.debug('*** FINISHED UPDATING namelist.wps ***')
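The interval and date strings fed into the namelist follow directly from bdy_interval and the boundary times; a small self-contained sketch with made-up values shows the conversion:

from datetime import datetime, timedelta

# hypothetical boundary times at a 6-hour interval, for illustration only
bdy_interval = 6
init_time = datetime(2016, 1, 1, 0)
bdy_times = [init_time + timedelta(hours=h) for h in range(0, 25, bdy_interval)]

interval_seconds = bdy_interval * 60 * 60
start_str = bdy_times[0].strftime("%Y-%m-%d_%H:%M:%S")
end_str = bdy_times[-1].strftime("%Y-%m-%d_%H:%M:%S")
print(interval_seconds, start_str, end_str)
# 21600 2016-01-01_00:00:00 2016-01-02_00:00:00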
Example #20
def update_namelist_wps(config):
    """ Updates the namelist.wps to reflect updated settings in config
    
    Arguments:
    config -- dictionary containing various configuration options
        
    """    
    logger     = shared.get_logger()
    logger.debug('*** UPDATING namelist.wps ***')

    #domain_dir = config['domain_dir']
    #model_run  = config['model_run']
    #wps_dir    = config['wps_dir']
    wps_run_dir= config['wps_run_dir']          # required for opt_geogrid_tbl_path
    #bdy_conditions = config['bdy_conditions'] 
    
    namelist_wps = config['namelist_wps']
    shutil.copyfile(namelist_wps, namelist_wps+'.backup')
    
    bdy_times  = shared.get_bdy_times(config)
    
    max_dom    = config['max_dom']
    init_time  = config['init_time']

    met_em_dir = shared.sub_date(config['met_em_dir'], init_time=init_time)
    geo_em_dir = config['geo_em_dir']

    bdy_interval = config['bdy_interval']
    interval_seconds = bdy_interval * 60 * 60

    logger.debug('reading namelist.wps <--------- %s' % namelist_wps)
    namelist = shared.read_namelist(namelist_wps)

    #
    # Update some options based on the forecast config file
    #
    namelist.update('max_dom', max_dom)
    namelist.update('opt_output_from_geogrid_path', geo_em_dir, section='share')
    namelist.update('opt_geogrid_tbl_path', wps_run_dir, section='geogrid')
    namelist.update('opt_metgrid_tbl_path', wps_run_dir, section='metgrid')
    namelist.update('interval_seconds', [interval_seconds])
    
    #
    # Generate formatted strings for inclusion in the namelist.wps file
    #
    start_str  = bdy_times[0].strftime("%Y-%m-%d_%H:%M:%S")
    end_str    = bdy_times[-1].strftime("%Y-%m-%d_%H:%M:%S")
    logger.debug("Updating namelist.wps start and end")
    logger.debug(start_str)
    logger.debug(end_str)

    namelist.update('start_date', [start_str]*max_dom)
    namelist.update('end_date',   [end_str]*max_dom)

        
    logger.debug('writing modified namelist.wps to file')
    namelist.to_file(namelist_wps)
    logger.debug('*** FINISHED UPDATING namelist.wps ***')
Example #21
def run_metgrid(config):
    """ Runs metgrid.exe and checks output was sucessful
    
    Arguments:
    config -- dictionary specifying configuration options
    
    """
    logger = shared.get_logger()
    logger.info("\n*** RUNNING METGRID ***")

    queue = config['queue']
    wps_run_dir = config['wps_run_dir']
    log_file = '%s/metgrid.log' % wps_run_dir
    bdy_conditions = config['bdy_conditions']
    namelist_wps = config['namelist_wps']
    namelist = shared.read_namelist(namelist_wps)

    met_em_dir = shared.sub_date(config['met_em_dir'], config['init_time'])

    #
    # vtable may be a dictionary to support running ungrib multiple
    # times. In which case, we need to put multiple prefixes into
    # the namelist.wps file
    #

    vtable = config['vtable']

    if type(vtable) == type({}):
        prefixes = vtable.keys()
    else:
        prefixes = [bdy_conditions]

    namelist.update('fg_name', prefixes)
    namelist.update('opt_output_from_metgrid_path',
                    met_em_dir,
                    section='metgrid')
    if not config['sst']:
        namelist.remove('constants_name')

    namelist.to_file(namelist_wps)

    logger.debug('met_em_dir: %s' % met_em_dir)
    if not os.path.exists(met_em_dir):
        logger.debug('creating met_em_dir: %s ' % met_em_dir)
        os.makedirs(met_em_dir)

    os.chdir(wps_run_dir)
    cmd = "%s/metgrid.exe" % wps_run_dir

    shared.run(cmd, config, from_dir=wps_run_dir)

    cmd = 'grep "Successful completion" %s/metgrid.log*' % wps_run_dir
    ret = shared.run_cmd(cmd, config)
    if ret != 0:
        raise IOError('metgrid.exe did not complete')

    logger.info('*** SUCCESS METGRID ***\n')
Example #22
def run_metgrid(config):
    """ Runs metgrid.exe and checks output was sucessful
    
    Arguments:
    config -- dictionary specifying configuration options
    
    """
    logger =shared.get_logger()
    logger.info("\n*** RUNNING METGRID ***")
    
    queue          = config['queue']
    wps_run_dir    = config['wps_run_dir']
    log_file       = '%s/metgrid.log' % wps_run_dir
    bdy_conditions = config['bdy_conditions']
    namelist_wps   = config['namelist_wps']
    namelist       = shared.read_namelist(namelist_wps)
    
    met_em_dir     = shared.sub_date(config['met_em_dir'], config['init_time'])        
    
    #
    # vtable may be a dictionary to support running ungrib multiple
    # times. In which case, we need to put multiple prefixes into
    # the namelist.wps file
    #
    
    vtable = config['vtable']
    
    if type(vtable)==type({}):
        prefixes = vtable.keys()
    else:
        prefixes = [bdy_conditions]    

        
    namelist.update('fg_name', prefixes)
    namelist.update('opt_output_from_metgrid_path', met_em_dir, section='metgrid')
    if not config['sst']:
        namelist.remove('constants_name')
        
    namelist.to_file(namelist_wps)
    
    logger.debug('met_em_dir: %s' % met_em_dir)
    if not os.path.exists(met_em_dir):
        logger.debug('creating met_em_dir: %s ' % met_em_dir)
        os.makedirs(met_em_dir)

    os.chdir(wps_run_dir)
    cmd      =  "%s/metgrid.exe" % wps_run_dir
    
    shared.run(cmd, config, from_dir=wps_run_dir)

    cmd = 'grep "Successful completion" %s/metgrid.log*' % wps_run_dir
    ret =shared.run_cmd(cmd, config)
    if ret!=0:
        raise IOError('metgrid.exe did not complete')
    
    logger.info('*** SUCCESS METGRID ***\n')
Example #23
def prepare_wrf(config):
    """Checks that met_em files exist, and links into WRF/run directory. 
    
    Arguments:
    config -- a dictionary containing forecast options

    """
    logger = shared.get_logger()
    logger.debug('*** PREPARING FILES FOR WRF ***')

    met_em_format = "%Y-%m-%d_%H:%M:%S"

    max_dom = config['max_dom']
    domains = range(1, max_dom + 1)
    init_time = config['init_time']
    fcst_hours = config['fcst_hours']
    bdy_interval = config['bdy_interval']
    bdy_times = shared.get_bdy_times(config)
    met_em_dir = shared.sub_date(config['met_em_dir'], init_time=init_time)
    met_em_files = [
        '%s/met_em.d%02d.%s.nc' % (met_em_dir, d, t.strftime(met_em_format))
        for d in domains for t in bdy_times
    ]
    wrf_run_dir = config['wrf_run_dir']
    namelist_run = '%s/namelist.input' % wrf_run_dir
    namelist_input = config['namelist_input']

    logger.debug('linking met_em files:')

    #
    # Link met_em files. There are two options for error handling here.
    # The first is to abort if any of the met_em files are missing.
    # The second is just to run wrf and see how far it gets before
    # running out of files. This will allow a partial forecast to run,
    # even if later files are missing.
    #
    # To use the first approach, raise an exception when a missing
    # file is encountered, otherwise just print a warning message.
    #
    # Actually, the two are equivalent so long as the met_em files
    # are sorted.
    #
    for f in met_em_files:
        if not os.path.exists(f):
            raise IOError('met_em file missing : %s' % f)
        cmd = 'ln -sf %s %s/' % (f, wrf_run_dir)
        shared.run_cmd(cmd, config)

    logger.debug('linking namelist.input to wrf_run_dir')
    cmd = 'rm -f %s' % namelist_run
    shared.run_cmd(cmd, config)
    cmd = 'ln -sf %s %s' % (namelist_input, namelist_run)
    shared.run_cmd(cmd, config)

    logger.debug('*** FINISHED PREPARING FILES FOR WRF ***')
Example #24
def prepare_wrf(config):
    """Checks that met_em files exist, and links into WRF/run directory. 
    
    Arguments:
    config -- a dictionary containing forecast options

    """
    logger =shared.get_logger()    
    logger.debug('*** PREPARING FILES FOR WRF ***')
    
    met_em_format      = "%Y-%m-%d_%H:%M:%S"
    

    max_dom      = config['max_dom']
    domains      = range(1,max_dom+1)
    init_time    = config['init_time']
    fcst_hours   = config['fcst_hours']
    bdy_interval = config['bdy_interval']
    bdy_times    = shared.get_bdy_times(config)
    met_em_dir   = shared.sub_date(config['met_em_dir'], init_time=init_time)
    met_em_files = ['%s/met_em.d%02d.%s.nc' % (met_em_dir,d, t.strftime(met_em_format)) for d in domains for t in bdy_times] 
    wrf_run_dir    = config['wrf_run_dir']
    namelist_run   = '%s/namelist.input' % wrf_run_dir
    namelist_input = config['namelist_input']
    
    
    logger.debug('linking met_em files:')
    
    #
    # Link met_em files. There are two options for error handling here.
    # The first is to abort if any of the met_em files are missing.
    # The second is just to run wrf and see how far it gets before
    # running out of files. This will allow a partial forecast to run, 
    # even if later files are missing.
    #    
    # To use the first approach, raise an exception when a missing
    # file is encountered, otherwise just print a warning message.
    #
    # Actually, the two are equivalent so long as the met_em files 
    # are sorted.
    #
    for f in met_em_files:
        if not os.path.exists(f):
            raise IOError('met_em file missing : %s' %f)
        cmd = 'ln -sf %s %s/'%(f, wrf_run_dir)
        shared.run_cmd(cmd, config)
    
    
    logger.debug('linking namelist.input to wrf_run_dir')
    cmd = 'rm -f %s' % namelist_run
    shared.run_cmd(cmd, config)
    cmd = 'ln -sf %s %s' %(namelist_input, namelist_run)
    shared.run_cmd(cmd, config)

    logger.debug('*** FINISHED PREPARING FILES FOR WRF ***')
Example #25
def extract_tseries(config):

    logger = shared.get_logger()
    logger.info("\n*** EXTRACTING TIME SERIES ***")

    wrfout_dir = config["wrfout_dir"]
    tseries_dir = config["tseries_dir"]
    json_dir = config["json_dir"]
    init_time = config["init_time"]
    dom = config["dom"]
    fcst_file = "%s/wrfout_d%02d_%s:00:00.nc" % (
        wrfout_dir,
        dom,
        init_time.strftime("%Y-%m-%d_%H"),
    )  # note we add on the nc extension here
    ncl_loc_file = config["locations_file"]
    ncl_code = config["tseries_code"]
    extract_hgts = config["extract_hgts"]
    # tseries_fmt    = config['tseries_fmt']
    ncl_log = config["ncl_log"]
    ncl_opt_template = config["ncl_opt_template"]
    ncl_opt_file = config["ncl_opt_file"]

    if not os.path.exists(tseries_dir):
        os.makedirs(tseries_dir)

    # Always go via the netcdf file
    tseries_file = "%s/tseries_d%02d_%s.nc" % (tseries_dir, dom, init_time.strftime("%Y-%m-%d_%H"))

    ncl_hgts = "(/%s/)" % ",".join(map(str, extract_hgts))
    replacements = {
        "<ncl_in_file>": fcst_file,
        "<ncl_out_file>": tseries_file,
        "<ncl_out_dir>": tseries_dir,
        "<ncl_out_type>": "nc",
        "<ncl_loc_file>": ncl_loc_file,
        "<extract_heights>": ncl_hgts,
    }

    shared.fill_template(ncl_opt_template, ncl_opt_file, replacements)

    logger.debug("ncl_opt_template: %s" % ncl_opt_template)
    logger.debug("    ncl_in_file  ----> %s" % fcst_file)
    logger.debug("    ncl_out_dir  ----> %s" % tseries_dir)
    logger.debug("    ncl_out_type ----> %s" % "nc")
    logger.debug("    ncl_loc_file ----> %s" % ncl_loc_file)
    logger.debug("ncl_opt_file: %s" % ncl_opt_file)

    for script in ncl_code:
        cmd = "NCL_OPT_FILE=%s ncl %s >> %s 2>&1" % (ncl_opt_file, script, ncl_log)
        shared.run_cmd(cmd, config)

    logger.info("*** DONE EXTRACTING TIME SERIES ***\n")
Example #26
    def __init__(self, logger=None):
        """
        Initialization of GenericJsonDao
        :param logger: logger
        """
        self._logger = logger or get_logger('TEST')
        try:
            self._dbclient = DbClientFactory.get_client(DbClientConfig.get(SCHEMA), self._logger)
            self._engine = self._dbclient.get_engine()
            Base.metadata.create_all(self._engine)
        except Exception as e:
            self._logger.exception("Unable to initialize MySQL " + SCHEMA + " schema: " + str(e))
Example #27
    def __init__(self, db_client_config, logger=None):
        """
        Constructor for creating an instance of the MySQLClient
        :param db_client_config: the configuration for the client to use
        :param logger: an instance of logger
        """
        self._connect_string = "mysql://{0}:{1}@{2}/{3}".format(
            db_client_config.user, db_client_config.password,
            db_client_config.host, db_client_config.database)
        self._logger = logger or get_logger('TEST')

        self._engine = None
        self._session = None
Example #28
def extract_tseries(config):

    logger = shared.get_logger()
    logger.info('\n*** EXTRACTING TIME SERIES ***')
     
    wrfout_dir     = config['wrfout_dir']
    tseries_dir    = config['tseries_dir']
    json_dir       = config['json_dir']
    init_time      = config['init_time']
    dom            = config['dom']
    fcst_file      = '%s/wrfout_d%02d_%s:00:00.nc' %(wrfout_dir, dom, init_time.strftime("%Y-%m-%d_%H")) # note we add on the nc extension here
    ncl_loc_file   = config['locations_file']
    ncl_code       = config['tseries_code']
    extract_hgts   = config['extract_hgts']
    #tseries_fmt    = config['tseries_fmt']
    ncl_log        = config['ncl_log']
    ncl_opt_template = config['ncl_opt_template']
    ncl_opt_file     = config['ncl_opt_file']

    if not os.path.exists(tseries_dir):
        os.makedirs(tseries_dir)
    
    # Always go via the netcdf file
    tseries_file = '%s/tseries_d%02d_%s.nc' % (tseries_dir, dom,init_time.strftime("%Y-%m-%d_%H"))

    ncl_hgts = '(/%s/)' % ','.join(map(str,extract_hgts))
    replacements = {'<ncl_in_file>'  : fcst_file, 
                    '<ncl_out_file>' : tseries_file,
                    '<ncl_out_dir>'  : tseries_dir, 
                    '<ncl_out_type>' : "nc",
                    '<ncl_loc_file>' : ncl_loc_file,
                    '<extract_heights>': ncl_hgts}
        

    shared.fill_template(ncl_opt_template, ncl_opt_file, replacements)
        
    logger.debug('ncl_opt_template: %s' % ncl_opt_template)
    logger.debug('    ncl_in_file  ----> %s' % fcst_file)
    logger.debug('    ncl_out_dir  ----> %s' % tseries_dir)
    logger.debug('    ncl_out_type ----> %s' % "nc")
    logger.debug('    ncl_loc_file ----> %s' % ncl_loc_file)
    logger.debug('ncl_opt_file: %s' % ncl_opt_file)
    
    
    
    for script in ncl_code:
        cmd  = "NCL_OPT_FILE=%s ncl %s >> %s 2>&1" % (ncl_opt_file,script, ncl_log)
        shared.run_cmd(cmd, config)

    logger.info("*** DONE EXTRACTING TIME SERIES ***\n")
Example #29
def extract_tseries(config):

    logger = shared.get_logger()
    logger.info('*** EXTRACTING TIME SERIES ***')

    wrfout_dir = config['wrfout_dir']
    tseries_dir = config['tseries_dir']
    json_dir = config['json_dir']
    init_time = config['init_time']
    dom = config['dom']
    fcst_file = '%s/wrfout_d%02d_%s:00:00.nc' % (
        wrfout_dir, dom, init_time.strftime("%Y-%m-%d_%H")
    )  # note we add on the nc extension here
    loc_file = config['locations_file']
    ncl_code = config['tseries_code']
    extract_hgts = config['extract_hgts']
    tseries_fmt = config['tseries_fmt']
    ncl_opt_file = config['ncl_opt_file']

    ncl_log = config['ncl_log']
    if not os.path.exists(tseries_dir):
        os.makedirs(tseries_dir)

    # Always go via the netcdf file
    tseries_file = '%s/tseries_d%02d_%s.nc' % (
        tseries_dir, dom, init_time.strftime("%Y-%m-%d_%H"))

    os.environ['FCST_FILE'] = fcst_file
    os.environ['LOCATIONS_FILE'] = loc_file
    os.environ['NCL_OUT_DIR'] = tseries_dir
    os.environ['NCL_OUT_FILE'] = tseries_file
    os.environ['NCL_OPT_FILE'] = ncl_opt_file

    logger.debug('Setting environment variables')
    logger.debug('FCST_FILE    ----> %s' % fcst_file)
    logger.debug('NCL_OUT_DIR  ----> %s' % tseries_dir)
    logger.debug('NCL_OUT_FILE  ----> %s' % tseries_file)
    logger.debug('LOCATIONS_FILE ----> %s' % loc_file)
    logger.debug('NCL_OPT_FILE   ----> %s' % ncl_opt_file)
    logger.debug(extract_hgts)

    ncl_hgts = '(/%s/)' % ','.join(map(str, extract_hgts))

    for script in ncl_code:
        cmd = "ncl 'extract_heights=%s'  %s >> %s 2>&1" % (ncl_hgts, script,
                                                           ncl_log)
        shared.run_cmd(cmd, config)

    ncdump(config)
Example #30
def move_wrfout_files(config):
    """ Moves output files from run directory to wrfout 
    director"""
    logger = shared.get_logger()
    logger.debug('\n*** MOVING WRFOUT FILES AND NAMELIST SETTINGS ***')

    domain = config['domain']
    model_run = config['model_run']
    working_dir = config['working_dir']
    wrf_run_dir = config['wrf_run_dir']
    init_time = config['init_time']
    init_str = init_time.strftime('%Y-%m-%d_%H')

    namelist_input = config['namelist_input']
    namelist_wps = config['namelist_wps']

    wrfout_dir = '%s/wrfout' % (working_dir)
    log_dir = '%s/log' % working_dir
    rsl_dir = '%s/rsl' % working_dir
    namelist_dir = '%s/namelist' % working_dir
    run_key = '%s.%s' % (domain, model_run)  # composite key

    logger.debug('Moving wrfout files from %s ----> %s' %
                 (wrf_run_dir, wrfout_dir))

    # Move WRF output files to new directory
    flist = glob.glob(wrf_run_dir + '/wrfout*')
    shared.transfer(flist, wrfout_dir, mode='move', debug_level='debug')

    # Move log files to new directory
    #flist = glob.glob(wrf_run_dir+'/rsl.*')
    #transfer(flist, rsl_dir, mode='move', debug_level='debug')

    cmd = 'cp %s %s/namelist.input.%s.%s' % (namelist_input, namelist_dir,
                                             run_key, init_str)
    shared.run_cmd(cmd, config)

    cmd = 'cp %s/namelist.wps %s/namelist.wps.%s.%s' % (
        working_dir, namelist_dir, run_key, init_str)
    shared.run_cmd(cmd, config)

    #
    # Archive log files
    #
    logger.debug('moving rsl files ----> %s' % rsl_dir)
    cmd = 'cp %s/rsl.out.0000 %s/rsl.out.%s' % (wrf_run_dir, rsl_dir, run_key)
    shared.run_cmd(cmd, config)
    logger.debug("*** FINISHED MOVING WRFOUT FILES ***")
Example #31
def extract_tseries(config):

    logger = shared.get_logger()
    logger.info('*** EXTRACTING TIME SERIES ***')
     
    wrfout_dir     = config['wrfout_dir']
    tseries_dir    = config['tseries_dir']
    json_dir       = config['json_dir']
    init_time      = config['init_time']
    dom            = config['dom']
    fcst_file      = '%s/wrfout_d%02d_%s:00:00.nc' %(wrfout_dir, dom, init_time.strftime("%Y-%m-%d_%H")) # note we add on the nc extension here
    loc_file       = config['locations_file']
    ncl_code       = config['tseries_code']
    extract_hgts   = config['extract_hgts']
    tseries_fmt    = config['tseries_fmt']
    ncl_opt_file   = config['ncl_opt_file']
    
    
    ncl_log        = config['ncl_log']
    if not os.path.exists(tseries_dir):
        os.makedirs(tseries_dir)
    
    # Always go via the netcdf file
    tseries_file = '%s/tseries_d%02d_%s.nc' % (tseries_dir, dom,init_time.strftime("%Y-%m-%d_%H"))

    os.environ['FCST_FILE']      = fcst_file
    os.environ['LOCATIONS_FILE'] = loc_file
    os.environ['NCL_OUT_DIR']    = tseries_dir
    os.environ['NCL_OUT_FILE']   = tseries_file
    os.environ['NCL_OPT_FILE']   = ncl_opt_file
    
    
    logger.debug('Setting environment variables')
    logger.debug('FCST_FILE    ----> %s'  % fcst_file)
    logger.debug('NCL_OUT_DIR  ----> %s'  % tseries_dir)
    logger.debug('NCL_OUT_FILE  ----> %s' % tseries_file)
    logger.debug('LOCATIONS_FILE ----> %s' % loc_file)
    logger.debug('NCL_OPT_FILE   ----> %s' % ncl_opt_file)
    logger.debug(extract_hgts)

    ncl_hgts = '(/%s/)' % ','.join(map(str,extract_hgts))
    
    for script in ncl_code:
        cmd  = "ncl 'extract_heights=%s'  %s >> %s 2>&1" % (ncl_hgts,script, ncl_log)
        shared.run_cmd(cmd, config)

    ncdump(config)
Example #32
def move_wrfout_files(config):
    """ Moves output files from run directory to wrfout 
    director"""
    logger =shared.get_logger()    
    logger.debug('\n*** MOVING WRFOUT FILES AND NAMELIST SETTINGS ***')
    
    domain        = config['domain']
    model_run     = config['model_run']
    working_dir = config['working_dir']
    wrf_run_dir   = config['wrf_run_dir']
    init_time     = config['init_time']
    init_str      = init_time.strftime('%Y-%m-%d_%H')

    namelist_input = config['namelist_input']
    namelist_wps   = config['namelist_wps']
    
    wrfout_dir    = '%s/wrfout'   %(working_dir)
    log_dir       = '%s/log'      % working_dir    
    rsl_dir       = '%s/rsl'      % working_dir
    namelist_dir  = '%s/namelist' % working_dir
    run_key       = '%s.%s'       %(domain, model_run)    # composite key  

    logger.debug('Moving wrfout files from %s ----> %s' %(wrf_run_dir, wrfout_dir) )

    # Move WRF output files to new directory
    flist = glob.glob(wrf_run_dir+'/wrfout*')
    shared.transfer(flist, wrfout_dir, mode='move', debug_level='debug')

    # Move log files to new directory
    #flist = glob.glob(wrf_run_dir+'/rsl.*')
    #transfer(flist, rsl_dir, mode='move', debug_level='debug')

    cmd = 'cp %s %s/namelist.input.%s.%s' % (namelist_input, namelist_dir, run_key, init_str)
    shared.run_cmd(cmd, config)
    
    cmd = 'cp %s/namelist.wps %s/namelist.wps.%s.%s' % (working_dir, namelist_dir, run_key, init_str)
    shared.run_cmd(cmd, config)


    #
    # Archive log files
    # 
    logger.debug('moving rsl files ----> %s' % rsl_dir )
    cmd = 'cp %s/rsl.out.0000 %s/rsl.out.%s' %(wrf_run_dir, rsl_dir, run_key)
    shared.run_cmd(cmd, config)
    logger.debug("*** FINISHED MOVING WRFOUT FILES ***")
Example #33
def finalise(config):
    """Removes files, transfers etc."""

    logger = shared.get_logger()

    logger.info('*** FINALISING ***')
    
    working_dir    = config['working_dir']
    
    
    links       = [shared.expand(x, config) for x in config['finalise.link']]
    remove      = [shared.expand(x, config) for x in config['finalise.remove']]
    subdirs     = [shared.expand(x, config) for x in config['finalise.create']]
    copy        = [shared.expand(x, config) for x in config['finalise.copy']]
    move        = [shared.expand(x, config) for x in config['finalise.move']]
    run         = [shared.expand(x, config) for x in config['finalise.run']]
    
        
    fulldirs  = subdirs 
    for d in fulldirs:
        if not os.path.exists(d):
            logger.debug('creating directory %s ' %d)
            os.mkdir(d) 
   
    
    for arg in move:
        cmd = "mv %s" % arg
        shared.run_cmd(cmd, config)

    for arg in copy:
        cmd = "cp %s" % arg
        shared.run_cmd(cmd, config)        
        
    for pattern in links:
        shared.link(pattern)
    
    for cmd in run:
        shared.run_cmd(cmd, config)
    
    for pattern in remove:
        flist = glob.glob(pattern)
        for f in flist:
            if os.path.exists(f):
                os.remove(f)
    
    logger.info('*** DONE FINALISE ***')    
Example #34
def get_sst(config):
    """ Downloads SST fields from an ftp server.
    Whoever is running this must have the http_proxy environment variable set
    correctly to allow them to download files through the proxy.  Example:
    http_proxy = http://slha:[email protected]:8080"""
    logger = shared.get_logger()
    # create an lftpscript in model run dir

    logger.info('*** FETCHING SST ***')
    working_dir = config['working_dir']
    tmp_dir = config['tmp_dir']
    http_proxy = os.environ['http_proxy']
    home = os.environ['HOME']
    sst_server = config['sst_server']
    sst_server_dir = config['sst_server_dir']
    sst_local_dir = config['sst_local_dir']
    sst_time = shared.get_sst_time(config)
    sst_filename = shared.sub_date(shared.get_sst_filename(config),
                                   init_time=config['init_time'])

    if not os.path.exists(sst_local_dir):
        os.makedirs(sst_local_dir)

    if os.path.exists('%s/%s' % (sst_local_dir, sst_filename)):
        logger.info('*** SST ALREADY EXISTS LOCALLY, NOT DOWNLOADED ***')
        return

    lftpfilename = '%s/lftpscript' % working_dir
    logger.debug('Writing lftpscript to %s' % lftpfilename)
    lftpscript = open(lftpfilename, 'w')
    lftpscript.write('lcd %s\n' % sst_local_dir)
    lftpscript.write('set ftp:proxy %s\n' % http_proxy)
    lftpscript.write('set hftp:use-type no\n')
    lftpscript.write('open %s\n' % sst_server)
    lftpscript.write('get %s/%s\n' % (sst_server_dir, sst_filename))
    lftpscript.write('bye')
    lftpscript.close()

    cmd = '/usr/bin/lftp -f %s' % lftpfilename
    shared.run_cmd(cmd, config)
    # check if file downloaded

    if not os.path.exists('%s/%s' % (sst_local_dir, sst_filename)):
        raise IOError('SST file: %s not downloaded' % sst_filename)
    logger.info('*** SUCCESS SST DOWNLOADED ***')
Example #35
def get_sst(config):
    """ Downloads SST fields from an ftp server.
    Whoever is running this must have the http_proxy environment variable set
    correctly to allow them to download files through the proxy.  Example:
    http_proxy = http://slha:[email protected]:8080"""
    logger      = shared.get_logger()
    # create an lftpscript in model run dir
    
    logger.info('*** FETCHING SST ***')
    working_dir    = config['working_dir']
    tmp_dir        = config['tmp_dir']
    http_proxy     = os.environ['http_proxy']
    home           = os.environ['HOME']
    sst_server     = config['sst_server']
    sst_server_dir = config['sst_server_dir']
    sst_local_dir  = config['sst_local_dir']
    sst_time       = shared.get_sst_time(config)
    sst_filename   = shared.sub_date(shared.get_sst_filename(config), init_time=config['init_time'])
   
    if not os.path.exists(sst_local_dir):
        os.makedirs(sst_local_dir)
    
    if os.path.exists('%s/%s' %(sst_local_dir, sst_filename)):
        logger.info('*** SST ALREADY EXISTS LOCALLY, NOT DOWNLOADED ***')
        return
    
    lftpfilename = '%s/lftpscript' % working_dir
    logger.debug('Writing lftpscript to %s' % lftpfilename)
    lftpscript     = open(lftpfilename, 'w')    
    lftpscript.write('lcd %s\n' % sst_local_dir)    
    lftpscript.write('set ftp:proxy %s\n' % http_proxy) 
    lftpscript.write('set hftp:use-type no\n')
    lftpscript.write('open %s\n' % sst_server)
    lftpscript.write('get %s/%s\n' % (sst_server_dir,sst_filename))
    lftpscript.write('bye')
    lftpscript.close()
    
    cmd = '/usr/bin/lftp -f %s' % lftpfilename
    shared.run_cmd(cmd, config)
    # check if file downloaded

    if not os.path.exists('%s/%s' %(sst_local_dir, sst_filename)):
        raise IOError('SST file: %s not downloaded' % sst_filename)
    logger.info('*** SUCCESS SST DOWNLOADED ***')
Example #36
def finalise(config):
    """Removes files, transfers etc."""

    logger = shared.get_logger()

    logger.info('*** FINALISING ***')

    working_dir = config['working_dir']

    links = [shared.expand(x, config) for x in config['finalise.link']]
    remove = [shared.expand(x, config) for x in config['finalise.remove']]
    subdirs = [shared.expand(x, config) for x in config['finalise.create']]
    copy = [shared.expand(x, config) for x in config['finalise.copy']]
    move = [shared.expand(x, config) for x in config['finalise.move']]
    run = [shared.expand(x, config) for x in config['finalise.run']]

    fulldirs = subdirs
    for d in fulldirs:
        if not os.path.exists(d):
            logger.debug('creating directory %s ' % d)
            os.mkdir(d)

    for arg in move:
        cmd = "mv %s" % arg
        shared.run_cmd(cmd, config)

    for arg in copy:
        cmd = "cp %s" % arg
        shared.run_cmd(cmd, config)

    for pattern in links:
        shared.link(pattern)

    for cmd in run:
        shared.run_cmd(cmd, config)

    for pattern in remove:
        flist = glob.glob(pattern)
        for f in flist:
            if os.path.exists(f):
                os.remove(f)

    logger.info('*** DONE FINALISE ***')
Example #37
def run_real(config):
    """ Run real.exe and check output was sucessful
    Arguments:
    config -- dictionary containing various configuration options """
    
    logger =shared.get_logger()    
    logger.info('*** RUNNING REAL ***')
    
    queue           = config['queue']
    working_dir   = config['working_dir']
    wrf_run_dir     = config['wrf_run_dir']
    wps_dir         = config['wps_dir']
    domain          = config['domain']
    model_run       = config['model_run']
    init_time       = config['init_time']
    log_file        = '%s/real.log' % wrf_run_dir


    # Log files from real appear in the current directory, 
    # so we need to change directory first.
    os.chdir(wrf_run_dir)
    cmd     =  "%s/real.exe" % wrf_run_dir
    shared.run(cmd, config, wrf_run_dir)
    
    
    rsl = '%s/rsl.error.0000' % wrf_run_dir
    if not os.path.exists(rsl):
        raise IOError('No log file found for real.exe')

    # now copy rsl file to a log directory
    cmd = 'cp %s %s/rsl/rsl.error.%s.%s.%s' % (rsl, working_dir, domain, model_run, init_time.strftime('%y-%m-%d_%H') )
    shared.run_cmd(cmd, config)



    cmd = 'grep "SUCCESS COMPLETE" %s/rsl.error.0000' % wrf_run_dir
    ret =shared.run_cmd(cmd, config)
    
    if ret!=0:
        raise IOError('real.exe did not complete')


    logger.info('*** SUCCESS REAL ***')
Example #38
def run_real(config):
    """ Run real.exe and check output was sucessful
    Arguments:
    config -- dictionary containing various configuration options """
    
    logger =shared.get_logger()    
    logger.info('\n*** RUNNING REAL ***')
    
    queue           = config['queue']
    working_dir   = config['working_dir']
    wrf_run_dir     = config['wrf_run_dir']
    wps_dir         = config['wps_dir']
    domain          = config['domain']
    model_run       = config['model_run']
    init_time       = config['init_time']
    log_file        = '%s/real.log' % wrf_run_dir


    # Log files from real appear in the current directory, 
    # so we need to change directory first.
    os.chdir(wrf_run_dir)
    cmd     =  "%s/real.exe" % wrf_run_dir
    shared.run(cmd, config, from_dir=wrf_run_dir)
    
    
    rsl = '%s/rsl.error.0000' % wrf_run_dir
    if not os.path.exists(rsl):
        raise IOError('No log file found for real.exe')

    # now copy rsl file to a log directory
    cmd = 'cp %s %s/rsl/rsl.error.%s.%s.%s' % (rsl, working_dir, domain, model_run, init_time.strftime('%y-%m-%d_%H') )
    shared.run_cmd(cmd, config)



    cmd = 'grep "SUCCESS COMPLETE" %s/rsl.error.0000' % wrf_run_dir
    ret =shared.run_cmd(cmd, config)
    
    if ret!=0:
        raise IOError('real.exe did not complete')


    logger.info('*** SUCCESS REAL ***\n')
Example #39
def convert_grib(config):
    """Converts the grib1 outputs of UPP to grib2 format, mainly so the wgrib2 tool 
    can be used to extract csv time series from it.
    
    
    Should not rely on globbing directories here. Could have nasty consequences,
    e.g. conversion of too many files etc """

    logger = shared.get_logger()
    logger.debug('*** CONVERTING GRIB1 TO GRIB2 ***')
    domain_dir = config['domain_dir']
    model_run = config['model_run']
    init_time = config['init_time']
    dom = config['dom']

    f1 = '%s/%s/archive/wrfpost_d%02d_%s.grb' % (
        domain_dir, model_run, dom, init_time.strftime('%Y-%m-%d_%H'))
    f2 = f1.replace('.grb', '.grib2')
    cmd = 'cnvgrib -g12 %s %s' % (f1, f2)
    shared.run_cmd(cmd, config)
Example #40
def run_gribmaster(config):
    """Runs the gribmaster programme to download the most recent boundary conditions """
    logger = shared.get_logger()
    gm_dir = config['gm_dir']
    gm_transfer = config['gm_transfer']
    gm_dataset = config['gm_dataset']
    start = config['init_time']
    fcst_hours = config['fcst_hours']
    gm_log = config['gm_log']
    gm_sleep = config['gm_sleep']  # this is in minutes
    gm_max_attempts = int(config['gm_max_attempts'])

    log_dir = '/home/slha/forecasting'

    cmd = '%s/gribmaster --verbose --%s --dset %s --date %s --cycle %s --length %s > %s' % (
        gm_dir, gm_transfer, gm_dataset, start.strftime('%Y%m%d'),
        start.strftime('%H'), fcst_hours, gm_log)

    for attempt in range(gm_max_attempts):
        logger.info('*** RUNNING GRIBMASTER, %s attempt ***' % (attempt + 1))
        shared.run_cmd(cmd, config)

        cmd = 'grep "BUMMER" %s' % gm_log  # check for failure
        ret = subprocess.call(cmd, shell=True)
        # if we positively find the string BUMMER, we know we have failed
        if ret == 0:
            logger.error('*** FAIL GRIBMASTER: Attempt %d of %d ***' %
                         (attempt + 1, gm_max_attempts))
            logger.info('Sleeping for %s minutes' % gm_sleep)
            time.sleep(gm_sleep * 60)

        # else we check for definite success
        else:
            cmd = 'grep "ENJOY" %s' % gm_log  # check for success
            ret = subprocess.call(cmd, shell=True)
            if ret == 0:
                logger.info('*** SUCCESS GRIBMASTER ***')
                return

    raise IOError('gribmaster did not find files after %d attempts' %
                  gm_max_attempts)
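The BUMMER/ENJOY checks rely on grep's exit status as returned by subprocess.call: 0 when the pattern is found, non-zero otherwise, which is why ret == 0 above signals that the marker string was present in the log. A tiny self-contained illustration:

import subprocess

# grep exits 0 when the pattern is found and non-zero when it is not
ret = subprocess.call('echo "ENJOY the grids" | grep "ENJOY" > /dev/null', shell=True)
print(ret)   # 0 -> pattern found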
Example #41
def run_gribmaster(config):
    """Runs the gribmaster programme to download the most recent boundary conditions """
    logger      = shared.get_logger()
    gm_dir      = config['gm_dir']
    gm_transfer = config['gm_transfer']
    gm_dataset  = config['gm_dataset']
    start       = config['init_time']
    fcst_hours  = config['fcst_hours']
    gm_log      = config['gm_log']
    gm_sleep    = config['gm_sleep'] # this is in minutes
    gm_max_attempts = int(config['gm_max_attempts'])

    log_dir = '/home/slha/forecasting'
       
    cmd     = '%s/gribmaster --verbose --%s --dset %s --date %s --cycle %s --length %s > %s' %(gm_dir, gm_transfer, gm_dataset, start.strftime('%Y%m%d'), start.strftime('%H'), fcst_hours, gm_log )

    for attempt in range(gm_max_attempts):
        logger.info('*** RUNNING GRIBMASTER, %s attempt ***' % (attempt+1))
        shared.run_cmd(cmd, config)
        
        cmd = 'grep "BUMMER" %s' % gm_log # check for failure
        ret = subprocess.call(cmd, shell=True)
        # if we positively find the string BUMMER, we know we have failed
        if ret==0:
            logger.error('*** FAIL GRIBMASTER: Attempt %d of %d ***' % (attempt+1, gm_max_attempts))
            logger.info('Sleeping for %s minutes' % gm_sleep) 
            time.sleep(gm_sleep*60)
        
        # otherwise check for definite success
        else:
            cmd = 'grep "ENJOY" %s' % gm_log # check for success
            ret = subprocess.call(cmd, shell=True)
            if ret==0:
                logger.info('*** SUCCESS GRIBMASTER ***')
                return
        
        
    raise IOError('gribmaster did not find files after %d attempts' % gm_max_attempts)
Beispiel #42
0
def convert_grib(config):
    """Converts the grib1 outputs of UPP to grib2 format, mainly so the wgrib2 tool 
    can be used to extract csv time series from it.
    
    
    Should not rely on globbing directories here. Could have nasty consequences,
    e.g. conversion of too many files etc """

    logger=shared.get_logger()
    logger.debug('*** CONVERTING GRIB1 TO GRIB2 ***')
    domain_dir = config['domain_dir']
    model_run  = config['model_run']
    init_time  = config['init_time']
    dom        = config['dom']
    
    
    
    f1 = '%s/%s/archive/wrfpost_d%02d_%s.grb' % (domain_dir, model_run, dom, init_time.strftime('%Y-%m-%d_%H'))
    f2 =  f1.replace('.grb', '.grib2')
    cmd = 'cnvgrib -g12 %s %s' %(f1, f2)
    shared.run_cmd(cmd, config)
    
    
Beispiel #43
0
def compress(config):
    """Compresses netcdf files to netcdf4 format. Relies on 
    the NCO operator nccopy.  Will try and compress all output netcdf files
    associated with the current initial time, based on the standard WRF naming 
    convention.  If a simulation produces multiple wrfout files for an
    initial time (i.e. one file per day for three days), then only the first file
    will be compressed under the current configuration.
    
    nccopy does not support the -O overwrite flag, so we need to manually rename the files,
    and remove the originals on sucess"""

    logger = shared.get_logger()
    logger.info("*** Compressing wrfout files ***")
    wrfout_dir = config['wrfout_dir']
    init_time = config['init_time']
    max_dom = config['max_dom']
    comp_level = config['compression_level']

    wrfout_files = [
        '%s/wrfout_d%02d_%s' %
        (wrfout_dir, d, init_time.strftime('%Y-%m-%d_%H:%M:%S'))
        for d in range(1, max_dom + 1)
    ]
    for f in wrfout_files:
        if not os.path.exists(f):
            raise MissingFile("could not find %s" % f)
        tmp_name = f + '.tmp'
        logger.debug("compressing %s to temporary file: %s" % (f, tmp_name))
        cmd = 'nccopy -k4 -d %s %s %s' % (comp_level, f, tmp_name)
        shared.run(cmd, config)
        if not os.path.exists(tmp_name):
            raise IOError("compression failed for %s" % f)

        os.remove(f)
        os.rename(tmp_name, f)

    logger.info("*** Done compressing wrfout files ***")
Beispiel #44
0
def run_ndown(config):
    logger = shared.get_logger()
    logger.info('*** RUNNING NDOWN ***')
    
    wrf_run_dir = config['wrf_run_dir']
    queue       = config['queue']
    log_file    = '%s/ndown.log' % wrf_run_dir
    
    cmd = '%s/ndown.exe' % wrf_run_dir
    
    nprocs = config['num_procs']
    poll_interval = config['poll_interval']
    logger.debug(poll_interval)
    logger.debug(nprocs)
    logger.debug(nprocs['ndown.exe'])
    
    shared.run(cmd, config, wrf_run_dir)
        
    cmd = 'grep "Successful completion" %s' % log_file # check for success
    ret =shared.run_cmd(cmd,config)
    if ret!=0:
        raise IOError('ndown.exe did not complete')
    
    logger.info('*** SUCESS NDOWN ***')
Beispiel #45
0
def run_ndown(config):
    logger = shared.get_logger()
    logger.info('*** RUNNING NDOWN ***')

    wrf_run_dir = config['wrf_run_dir']
    queue = config['queue']
    log_file = '%s/ndown.log' % wrf_run_dir

    cmd = '%s/ndown.exe' % wrf_run_dir

    nprocs = config['num_procs']
    poll_interval = config['poll_interval']
    logger.debug(poll_interval)
    logger.debug(nprocs)
    logger.debug(nprocs['ndown.exe'])

    shared.run(cmd, config, wrf_run_dir)

    cmd = 'grep "Successful completion" %s' % log_file  # check for success
    ret = shared.run_cmd(cmd, config)
    if ret != 0:
        raise IOError('ndown.exe did not complete')

    logger.info('*** SUCCESS NDOWN ***')
Beispiel #46
0
def transfer_to_web_dir(config):
    """ Transfers all plots in output folder to web folder"""
    
    logger = shared.get_logger()    
    logger.debug('Transferring plot files to web dir')
    init_time      = config['init_time']    
    full_trace     = config['full_trace']
    ncl_out_dir    = shared.sub_date(config['ncl_out_dir'], init_time=init_time)
    ncl_web_dir    = shared.sub_date(config['ncl_web_dir'], init_time=init_time)
    
    if not os.path.exists(ncl_web_dir):
        os.makedirs(ncl_web_dir)
    
    flist = glob.glob(ncl_out_dir+'/*')
    shared.transfer(flist, ncl_web_dir, mode='copy', debug_level='NONE')

    ncl_out_dir    = shared.sub_date(config['ncl_ol_out_dir'], init_time=init_time)
    ncl_web_dir    = shared.sub_date(config['ncl_ol_web_dir'], init_time=init_time)
    
    if not os.path.exists(ncl_web_dir):
        os.makedirs(ncl_web_dir)
    
    flist = glob.glob(ncl_out_dir+'/*')
    shared.transfer(flist, ncl_web_dir, mode='copy', debug_level='NONE')
Beispiel #47
0
def run_ungrib(config):
    """ Runs ungrib.exe and checks output was sucessfull
    If vtable and gbr_input_fmt are NOT dictionaries, 
    then dictionarius will be constructed from them using 
    the key bdy_conditions from the metadata
    
    Arguments:
    config -- dictionary specifying configuration options
    
    """
    logger = shared.get_logger()
    wps_dir = config['wps_dir']
    wps_run_dir = config['wps_run_dir']
    namelist_wps = config['namelist_wps']
    working_dir = config['working_dir']
    met_em_dir = config['met_em_dir']
    init_time = config['init_time']
    log_file = '%s/ungrib.log' % wps_run_dir
    vtable = config['vtable']
    grb_input_fmt = config['grb_input_fmt']
    grb_input_delay = config.get(
        "grb_input_delay")  # this allows None to be returned

    bdy_conditions = config['bdy_conditions']

    logger.info("\n*** RUNNING UNGRIB ***")

    namelist = shared.read_namelist(namelist_wps)

    bdy_times = shared.get_bdy_times(config)

    if not isinstance(grb_input_fmt, dict):
        grb_input_fmt = {bdy_conditions: grb_input_fmt}

    if not isinstance(vtable, dict):
        vtable = {bdy_conditions: vtable}

    #
    # Check that boundary conditions exist
    #
    for key in vtable.keys():

        if grb_input_delay and key in grb_input_delay:
            logger.debug("applying delay")
            delay = datetime.timedelta(0, grb_input_delay[key] * 60 * 60)
            new_bdy_times = [b - delay for b in bdy_times]
        else:
            logger.debug("no delay applied")
            new_bdy_times = bdy_times

        fmt = grb_input_fmt[key]
        #
        # Generate filelist based on the initial time, and the forecast hour
        #
        filelist = list(
            OrderedDict.fromkeys(shared.get_bdy_filenames(fmt, new_bdy_times)))

        #
        # Check the boundary files exist
        #
        logger.debug('checking boundary condition files exists')
        for f in filelist:
            if not os.path.exists(f):
                raise IOError('cannot find file: %s' % f)

    logger.debug('all boundary conditions files exist')

    #
    # Now process boundary conditions
    #
    for key in vtable.keys():

        if grb_input_delay and key in grb_input_delay:
            logger.debug("applying delay")
            delay = datetime.timedelta(0, grb_input_delay[key] * 60 * 60)
            new_bdy_times = [b - delay for b in bdy_times]
        else:
            logger.debug("no delay applied")
            new_bdy_times = bdy_times

        fmt = grb_input_fmt[key]
        #
        # Generate filelist based on the initial time, and the forecast hour
        #
        filelist = list(
            OrderedDict.fromkeys(shared.get_bdy_filenames(fmt, new_bdy_times)))

        logger.debug(
            'running link_grib.csh script to link grib files to GRIBFILE.AAA etc'
        )

        os.chdir(wps_run_dir)
        args = ' '.join(filelist)
        cmd = '%s/link_grib.csh %s' % (wps_run_dir, args)
        shared.run_cmd(cmd, config)

        vtab_path = vtable[key]
        prefix = key
        namelist.update('prefix', key)
        namelist.to_file(namelist_wps)
        link_namelist_wps(config)
        vtab_wps = wps_run_dir + '/Vtable'

        if os.path.exists(vtab_wps):
            os.remove(vtab_wps)
        cmd = 'ln -sf %s %s' % (vtab_path, vtab_wps)
        logger.debug(cmd)
        subprocess.call(cmd, shell=True)
        #logger.debug("changing directory to %s" % wps_run_dir)
        #os.chdir(wps_run_dir)
        cmd = '%s/ungrib.exe' % wps_run_dir

        logger.debug(cmd)
        shared.run(cmd, config, wps_run_dir)

        cmd = 'grep "Successful completion" %s/ungrib.log*' % wps_run_dir  # check for success
        ret = shared.run_cmd(cmd, config)
        if ret != 0:
            raise IOError('ungrib.exe did not complete')

    logger.info('*** SUCCESS UNGRIB ***\n')
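# The docstring's rule that scalar vtable / grb_input_fmt entries are wrapped into
# dictionaries keyed by bdy_conditions, and the optional grb_input_delay shift of
# the boundary times, can be illustrated in isolation. A sketch under those
# assumptions; the function names are illustrative and not part of wrftools.
import datetime


def normalise_bdy_inputs(vtable, grb_input_fmt, bdy_conditions):
    """Wrap scalar values into single-key dicts so every boundary source is handled uniformly."""
    if not isinstance(grb_input_fmt, dict):
        grb_input_fmt = {bdy_conditions: grb_input_fmt}
    if not isinstance(vtable, dict):
        vtable = {bdy_conditions: vtable}
    return vtable, grb_input_fmt


def apply_input_delay(bdy_times, delay_hours):
    """Shift boundary times back by delay_hours, as done when a dataset is published late."""
    delay = datetime.timedelta(hours=delay_hours)
    return [t - delay for t in bdy_times]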
Beispiel #48
0
def ungrib_sst(config):
    """ Runs ungrib.exe for SST fields, makes and modifies a copy of namelist.wps,
    then restores the original namelist.wps"""
    logger = shared.get_logger()
    
    wps_dir      = config['wps_dir']
    wps_run_dir  = config['wps_run_dir']
    tmp_dir      = config['tmp_dir']
    working_dir  = config['working_dir']
    init_time    = config['init_time']
    max_dom      = config['max_dom']
    sst_local_dir = config['sst_local_dir']
    sst_time     = shared.get_sst_time(config)
    sst_filename = shared.get_sst_filename(config)
    vtable_sst   = config['sst_vtable']
    vtable       = wps_run_dir+'/Vtable'
    queue        = config['queue']
    log_file     = '%s/ungrib.sst.log' % wps_run_dir
    namelist_wps  = config['namelist_wps']
    namelist_sst  = '%s/namelist.sst' % working_dir

    namelist      = shared.read_namelist(namelist_wps)

    #
    # update one line to point to the new SST field
    # ungrib.exe will name SST field as e.g.
    # SST:2013-04-24_00
    #
    constants_name = '%s/SST:%s' %(wps_run_dir, sst_time.strftime('%Y-%m-%d_%H'))
    logger.debug('Updating constants_name ----> %s' % constants_name)
    namelist.update('constants_name', constants_name, section='metgrid')

    # Write the changes into the original
    namelist.to_file(namelist_wps)

    #
    # Update start and end time to process SST
    #
    start_str  = sst_time.strftime("%Y-%m-%d_%H:%M:%S")
    end_str    = sst_time.strftime("%Y-%m-%d_%H:%M:%S")
    logger.debug("Updating namelist.sst")
    logger.debug('PREFIX ------> SST')
    logger.debug('start_date---> ' +start_str)
    logger.debug('end_date-----> '+ end_str)

    namelist.update('prefix', 'SST')
    namelist.update('start_date', [start_str]*max_dom)
    namelist.update('end_date',   [end_str]*max_dom)
    logger.debug('writing modified namelist.sst to file -------> %s' % namelist_sst)
    namelist.to_file(namelist_sst)

    #remove any linked namelist.wps 
    logger.debug('removing namelist.wps')
    namelist_run = '%s/namelist.wps' % wps_run_dir
    if os.path.exists(namelist_run):
        os.remove(namelist_run)

    # link namelist.sst to namelist.wps in WPS run dir
    logger.debug('linking namelist.sst -----> namelist.wps')
    cmd = 'ln -sf %s %s' %(namelist_sst, namelist_run)
    shared.run_cmd(cmd, config)

    logger.debug('removing Vtable')
    if os.path.exists(vtable):
        os.remove(vtable)
    logger.debug('linking Vtable.SST ----> Vtable')
    cmd = 'ln -sf %s %s' %(vtable_sst, vtable)
    shared.run_cmd(cmd, config)

    # run link_grib to link SST grib files
    logger.debug('Linking SST GRIB files')
    cmd = '%s/link_grib.csh %s/%s' %(wps_dir, sst_local_dir, sst_filename)
    shared.run_cmd(cmd, config)


    logger.info('\n*** RUNNING UNGRIB FOR SST ***')
    cmd     =  '%s/ungrib.exe' % wps_run_dir
    shared.run_cmd(cmd, config)

    cmd = 'grep "Successful completion" ./ungrib.log*' # check for success
    ret = shared.run_cmd(cmd, config)
    if ret!=0:
        raise IOError('Ungrib failed for SST')
    
    logger.info('*** SUCCESS UNGRIB SST ***\n')
    logger.debug('Removing namelist.wps')
    if os.path.exists(namelist_run): 
        os.remove(namelist_run)
    # link in original (unmodified) namelist.wps
    cmd = 'ln -sf %s %s' %(namelist_wps, namelist_run)    
    shared.run_cmd(cmd, config)
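# ungrib_sst swaps namelist.wps for a modified namelist.sst, runs ungrib.exe, and
# then restores the original link. A generic sketch of that swap-then-restore
# pattern as a context manager, using plain file copies instead of symlinks;
# the helper is an illustration and not part of wrftools.
import contextlib
import shutil


@contextlib.contextmanager
def temporary_file_swap(target, replacement):
    """Temporarily replace target with a copy of replacement, restoring the original afterwards."""
    backup = target + '.orig'
    shutil.copyfile(target, backup)
    try:
        shutil.copyfile(replacement, target)
        yield target
    finally:
        shutil.move(backup, target)

# e.g. with temporary_file_swap(namelist_run, namelist_sst): run ungrib.exe for the SST field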
Beispiel #49
0
def run_ungrib(config):
    """ Runs ungrib.exe and checks output was sucessfull
    If vtable and gbr_input_fmt are NOT dictionaries, 
    then dictionarius will be constructed from them using 
    the key bdy_conditions from the metadata
    
    Arguments:
    config -- dictionary specifying configuration options
    
    """
    logger        =shared.get_logger()
    wps_dir       = config['wps_dir']
    wps_run_dir   = config['wps_run_dir']
    namelist_wps  = config['namelist_wps']
    working_dir   = config['working_dir']    
    met_em_dir    = config['met_em_dir']
    init_time     = config['init_time']
    log_file      = '%s/ungrib.log' % wps_run_dir
    vtable        = config['vtable']
    grb_input_fmt  = config['grb_input_fmt']
    grb_input_delay = config.get("grb_input_delay")  # this allows None to be returned 
    
    bdy_conditions = config['bdy_conditions']
    
    
    
    
    
    logger.info("\n*** RUNNING UNGRIB ***")
    
    namelist = shared.read_namelist(namelist_wps)
    
    bdy_times     = shared.get_bdy_times(config)
    

    if not isinstance(grb_input_fmt, dict):
        grb_input_fmt = {bdy_conditions:grb_input_fmt}

    if not isinstance(vtable, dict):
        vtable = {bdy_conditions:vtable}


    #
    # Check that boundary conditions exist
    #     
    for key in vtable.keys():
        
        
        if grb_input_delay and key in grb_input_delay:
            logger.debug("applying delay")
            delay = datetime.timedelta(0, grb_input_delay[key]*60*60)
            new_bdy_times = [b - delay for b in bdy_times]
        else:
            logger.debug("no delay applied")
            new_bdy_times = bdy_times
        
        fmt = grb_input_fmt[key]
        #
        # Generate filelist based on the initial time, and the forecast hour
        #        
        filelist = list(OrderedDict.fromkeys(shared.get_bdy_filenames(fmt, new_bdy_times)))

        #
        # Check the boundary files exist
        #
        logger.debug('checking boundary condition files exists')    
        for f in filelist:
            if not os.path.exists(f):
                raise IOError('cannot find file: %s' %f)
        
    
    
    logger.debug('all boundary conditions files exist')
    
    #
    # Now process boundary conditions
    #
    for key in vtable.keys():

        if grb_input_delay and key in grb_input_delay:
            logger.debug("applying delay")
            delay = datetime.timedelta(0, grb_input_delay[key]*60*60)
            new_bdy_times = [b - delay for b in bdy_times]
        else:
            logger.debug("no delay applied")
            new_bdy_times = bdy_times
        
        fmt = grb_input_fmt[key]
        #
        # Generate filelist based on the initial time, and the forecast hour
        #        
        filelist = list(OrderedDict.fromkeys(shared.get_bdy_filenames(fmt, new_bdy_times)))

        
        logger.debug('running link_grib.csh script to link grib files to GRIBFILE.AAA etc')
        
        os.chdir(wps_run_dir)
        args = ' '.join(filelist)
        cmd = '%s/link_grib.csh %s' %(wps_run_dir,args)
        shared.run_cmd(cmd, config)
  
        vtab_path = vtable[key]
        prefix = key
        namelist.update('prefix', key)
        namelist.to_file(namelist_wps)
        link_namelist_wps(config)
        vtab_wps  = wps_run_dir+'/Vtable'

        if os.path.exists(vtab_wps):
            os.remove(vtab_wps)
        cmd = 'ln -sf %s %s' %(vtab_path, vtab_wps)
        logger.debug(cmd)
        subprocess.call(cmd, shell=True)    
        #logger.debug("changing directory to %s" % wps_run_dir)
        #os.chdir(wps_run_dir)
        cmd     =  '%s/ungrib.exe' % wps_run_dir
        
        logger.debug(cmd)
        shared.run(cmd, config, wps_run_dir)

        cmd = 'grep "Successful completion" %s/ungrib.log*' % wps_run_dir # check for success
        ret =shared.run_cmd(cmd,config)
        if ret!=0:
            raise IOError('ungrib.exe did not complete')
    
    logger.info('*** SUCCESS UNGRIB ***\n')
Beispiel #50
0
def update_namelist_input(config):
    """ Updates the namelist.input file to reflect updated settings in config.
    Adds a non-standard &metadata section to give a name to the model run
    
    Arguments:
    config -- dictionary containing various configuration options
        
    """
    logger = shared.get_logger()
    logger.debug('*** UPDATING namelist.input ***')

    #wrf_dir       = config['wrf_dir']
    working_dir = config['working_dir']
    model = config['model']
    model_run = config['model_run']

    domain = config['domain']

    namelist_run = '%s/namelist.input' % working_dir
    namelist_input = config['namelist_input']
    namelist_wps = config['namelist_wps']
    shutil.copyfile(namelist_input, namelist_input + '.backup')

    # read settings from domain-based namelist
    namelist = shared.read_namelist(namelist_input)

    # read settings from domain-based namelist.wps
    #namelist_wps   = shared.read_namelist(namelist_wps)
    #wps_settings   = namelist_wps.settings

    #
    # Add new metadata section to namelist.input
    #
    logger.debug('Adding metadata to the namelist.input')
    logger.debug('domain = %s' % domain)
    logger.debug('model = %s' % model)
    logger.debug('model_run = %s' % model_run)
    namelist.update('domain', domain, 'metadata')
    namelist.update('model', model, 'metadata')
    namelist.update('model_run', model_run, 'metadata')

    #
    # Overrule max_dom with the one in config
    #
    max_dom = config['max_dom']
    namelist.update('max_dom', max_dom)
    #logger.debug("Syncing dx and dy between namelist.wps and namelist.input")
    #dx = wps_settings['dx']
    #dy = wps_settings['dy']
    #logger.debug("namleist.wps: dx: %s ------> namelist.input" % dx)
    #logger.debug("namleist.wps: dy: %s ------> namelist.input" % dy)
    #namelist.update('dx', wps_settings['dx'])
    #namelist.update('dy', wps_settings['dy'])
    fcst_hours = config['fcst_hours']
    fcst_times = shared.get_fcst_times(config)
    history_interval = config['history_interval']
    bdy_interval = config['bdy_interval']  # this is in hours

    interval_seconds = 60 * 60 * bdy_interval

    start = fcst_times[0]
    end = fcst_times[-1]
    diff = end - start

    #
    # I'm still not sure how the WRF namelist works between
    # the start and end settings and the run_XX settings.
    # I think we can just keep days as zero, and work entirely
    # in hours
    #
    namelist.update('start_year', [start.year] * max_dom)
    namelist.update('start_month', [start.month] * max_dom)
    namelist.update('start_day', [start.day] * max_dom)
    namelist.update('start_hour', [start.hour] * max_dom)
    namelist.update('end_year', [end.year] * max_dom)
    namelist.update('end_month', [end.month] * max_dom)
    namelist.update('end_day', [end.day] * max_dom)
    namelist.update('end_hour', [end.hour] * max_dom)
    namelist.update('run_days', [0])
    namelist.update('run_hours', [fcst_hours])
    #namelist.update('run_minutes',[0])
    namelist.update('history_interval', [history_interval] * max_dom)
    namelist.update('interval_seconds', [interval_seconds])

    #
    # If DFI is being used, update DFI settings
    # From user guide:
    # "For time specification, it typically needs to integrate
    # backward for 0.5 to 1 hour, and integrate forward for half of the time."
    #
    # should we just write this every time into the file and rely on dfi_opt
    # as the on/off switch?
    #
    hour = datetime.timedelta(0, 60 * 60)
    minute = datetime.timedelta(0, 60)
    #dfi_bck      = config['dfi_bck'] * minute
    #dfi_fwd      = config['dfi_fwd'] * minute
    #dfi_bckstop  = start - dfi_bck
    #dfi_fwdstop  = start + dfi_fwd

    #namelist.update('dfi_bckstop_year',   dfi_bckstop.year,   'dfi_control')
    #namelist.update('dfi_bckstop_month',  dfi_bckstop.month,  'dfi_control')
    #namelist.update('dfi_bckstop_day',    dfi_bckstop.day,    'dfi_control')
    #namelist.update('dfi_bckstop_hour',   dfi_bckstop.hour,   'dfi_control')
    #namelist.update('dfi_bckstop_minute', dfi_bckstop.minute, 'dfi_control')
    #namelist.update('dfi_bckstop_second', dfi_bckstop.second, 'dfi_control')
    #namelist.update('dfi_fwdstop_year',   dfi_fwdstop.year,   'dfi_control')
    #namelist.update('dfi_fwdstop_month',  dfi_fwdstop.month,  'dfi_control')
    #namelist.update('dfi_fwdstop_day',    dfi_fwdstop.day,    'dfi_control')
    #namelist.update('dfi_fwdstop_hour',   dfi_fwdstop.hour,   'dfi_control')
    #namelist.update('dfi_fwdstop_minute', dfi_fwdstop.minute, 'dfi_control')
    #namelist.update('dfi_fwdstop_second', dfi_fwdstop.second, 'dfi_control')

    logger.debug('writing new settings to file')
    namelist.to_file(namelist_input)

    #logger.debug(namelist)
    logger.debug('*** FINISHED UPDATING namelist.input ***')
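# The time bookkeeping above (per-domain start/end lists, run_hours, and
# interval_seconds derived from bdy_interval) can be shown with plain dicts in
# place of the wrftools namelist object. A sketch only; the function name and
# the return format are assumptions made for illustration.
import datetime


def namelist_time_settings(fcst_times, fcst_hours, bdy_interval, history_interval, max_dom):
    """Derive the time-related namelist.input settings from the forecast times."""
    start, end = fcst_times[0], fcst_times[-1]
    return {
        'start_year': [start.year] * max_dom,
        'start_month': [start.month] * max_dom,
        'start_day': [start.day] * max_dom,
        'start_hour': [start.hour] * max_dom,
        'end_year': [end.year] * max_dom,
        'end_month': [end.month] * max_dom,
        'end_day': [end.day] * max_dom,
        'end_hour': [end.hour] * max_dom,
        'run_days': [0],
        'run_hours': [fcst_hours],
        'history_interval': [history_interval] * max_dom,
        'interval_seconds': [bdy_interval * 60 * 60],
    }


if __name__ == '__main__':
    # a 24-hour forecast from 2013-04-24 00Z, 3-hourly boundaries, hourly history, 2 domains
    init = datetime.datetime(2013, 4, 24, 0)
    print(namelist_time_settings([init, init + datetime.timedelta(hours=24)], 24, 3, 60, 2))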
Beispiel #51
0
def prepare_wps(config):
    """ Runs all the pre-processing steps necessary for running WPS.
    
    Reads the current value of init_time from config, and links 
    boundary condition files into correct directory. Creates an output
    directory for the met_em files.
    
    Arguments:
    config -- dictionary containing various configuration options"""
    
    logger       = shared.get_logger()
    logger.debug('*** PREPARING FILES FOR WPS ***')
    
    wps_dir       = config['wps_dir']          # the base installation of WPS
    wps_run_dir   = config['wps_run_dir']      # the directory to run WPS from
    working_dir = config['working_dir']    # model run directory 
    met_em_dir    = config['met_em_dir']
    init_time     = config['init_time']

    
    grb_input_fmt = config['grb_input_fmt']
    vtable        = config['vtable']
    bdy_times     = shared.get_bdy_times(config)

    if isinstance(grb_input_fmt, dict):
        logger.debug(grb_input_fmt)
        fmts = grb_input_fmt.values()
        
    else:
        fmts = [grb_input_fmt]
    
    
    for fmt in fmts:
        #
        # Generate filelist based on the initial time, and the forecast hour
        #        
        filelist = shared.get_bdy_filenames(fmt, bdy_times)

        #
        # Check the boundary files exist
        #
        logger.debug('checking boundary condition files exists')    
        for f in filelist:
            if not os.path.exists(f):
                raise IOError('cannot find file: %s' %f)
        
    logger.debug('all boundary conditions files exist')
    
    #
    # Run the link_grib script to link the FNL files
    #
    logger.debug('running link_grib.csh script to link grib files to GRIBFILE.AAA etc')
    os.chdir(wps_run_dir)
    args = ' '.join(filelist)
    cmd = '%s/link_grib.csh %s' %(wps_run_dir,args)
    shared.run(cmd, config)

    logger.debug('Path for met_em files is %s' % met_em_dir)
    if not os.path.exists(met_em_dir):
        os.makedirs(met_em_dir)

   
    logger.debug('*** FINISHED PREPARING FILES FOR WPS ***')    
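# prepare_wps builds its file list from shared.get_bdy_times(config), whose
# implementation is not shown in these examples. A hypothetical sketch of what
# such a helper might compute from init_time, fcst_hours and bdy_interval,
# offered purely as an illustration of the boundary-time sequence.
import datetime


def bdy_times_sketch(init_time, fcst_hours, bdy_interval):
    """Boundary-condition times from init_time to init_time + fcst_hours, every bdy_interval hours."""
    step = datetime.timedelta(hours=bdy_interval)
    return [init_time + i * step for i in range(fcst_hours // bdy_interval + 1)]

# e.g. bdy_times_sketch(datetime.datetime(2013, 4, 24, 0), 24, 6) gives five times, 00Z to 00Z+24h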
Beispiel #52
0
def update_namelist_input(config):    
    """ Updates the namelist.input file to reflect updated settings in config.
    Adds a non-standard &metadata section to give a name to the model run
    
    Arguments:
    config -- dictionary containing various configuration options
        
    """        
    logger =shared.get_logger()        
    logger.debug('*** UPDATING namelist.input ***')
    

    #wrf_dir       = config['wrf_dir']
    working_dir   = config['working_dir']
    model         = config['model']
    model_run     = config['model_run']
    
    domain        = config['domain']
        
    namelist_run  = '%s/namelist.input'  % working_dir
    namelist_input = config['namelist_input']
    namelist_wps   = config['namelist_wps']
    shutil.copyfile(namelist_input, namelist_input+'.backup')
    
    # read settings from domain-based namelist
    namelist        = shared.read_namelist(namelist_input)   

    # read settings from domain-based namelist.wps
    #namelist_wps   = shared.read_namelist(namelist_wps)   
    #wps_settings   = namelist_wps.settings


    #
    # Add new metadata section to namelist.input
    #
    logger.debug('Adding metadata to the namelist.input')
    logger.debug('domain = %s'    %domain)
    logger.debug('model = %s'     %model)
    logger.debug('model_run = %s' %model_run)
    namelist.update('domain', domain, 'metadata')
    namelist.update('model',model, 'metadata')
    namelist.update('model_run',model_run, 'metadata')  

    
    #
    # Overrule max_dom with the one in config
    #
    max_dom = config['max_dom']
    namelist.update('max_dom', max_dom)
    #logger.debug("Syncing dx and dy between namelist.wps and namelist.input")
    #dx = wps_settings['dx']
    #dy = wps_settings['dy']
    #logger.debug("namleist.wps: dx: %s ------> namelist.input" % dx)
    #logger.debug("namleist.wps: dy: %s ------> namelist.input" % dy)
    #namelist.update('dx', wps_settings['dx'])
    #namelist.update('dy', wps_settings['dy'])    
    fcst_hours       = config['fcst_hours']
    fcst_times       = shared.get_fcst_times(config)
    history_interval = config['history_interval']   
    bdy_interval     = config['bdy_interval']  # this is in hours         
    
    interval_seconds = 60*60*bdy_interval
    
    start   = fcst_times[0]
    end     = fcst_times[-1]
    diff    = end - start
    
    
    #
    # I'm still not sure how the WRF namelist works between 
    # the start and end settings and the run_XX settings.
    # I think we can just keep days as zero, and work entirely
    # in hours
    #
    namelist.update('start_year', [start.year] * max_dom)
    namelist.update('start_month',[start.month]* max_dom)
    namelist.update('start_day',  [start.day]  * max_dom)
    namelist.update('start_hour', [start.hour] * max_dom)
    namelist.update('end_year',   [end.year]   * max_dom)
    namelist.update('end_month',  [end.month]  * max_dom)
    namelist.update('end_day',    [end.day]    * max_dom)
    namelist.update('end_hour',   [end.hour]   * max_dom)
    namelist.update('run_days',   [0])   
    namelist.update('run_hours',  [fcst_hours] )   
    #namelist.update('run_minutes',[0])   
    namelist.update('history_interval', [history_interval] * max_dom)
    namelist.update('interval_seconds', [interval_seconds])


    #
    # If DFI is being used, update DFI settings
    # From user guide:
    # "For time specification, it typically needs to integrate 
    # backward for 0.5 to 1 hour, and integrate forward for half of the time."
    #
    # should we just write this every time into the file and rely on dfi_opt
    # as the on/off switch?
    #
    hour         = datetime.timedelta(0, 60*60)
    minute       = datetime.timedelta(0, 60)
    #dfi_bck      = config['dfi_bck'] * minute
    #dfi_fwd      = config['dfi_fwd'] * minute
    #dfi_bckstop  = start - dfi_bck
    #dfi_fwdstop  = start + dfi_fwd
    

    #namelist.update('dfi_bckstop_year',   dfi_bckstop.year,   'dfi_control')
    #namelist.update('dfi_bckstop_month',  dfi_bckstop.month,  'dfi_control')
    #namelist.update('dfi_bckstop_day',    dfi_bckstop.day,    'dfi_control')
    #namelist.update('dfi_bckstop_hour',   dfi_bckstop.hour,   'dfi_control')
    #namelist.update('dfi_bckstop_minute', dfi_bckstop.minute, 'dfi_control')
    #namelist.update('dfi_bckstop_second', dfi_bckstop.second, 'dfi_control')
    #namelist.update('dfi_fwdstop_year',   dfi_fwdstop.year,   'dfi_control')
    #namelist.update('dfi_fwdstop_month',  dfi_fwdstop.month,  'dfi_control')
    #namelist.update('dfi_fwdstop_day',    dfi_fwdstop.day,    'dfi_control')
    #namelist.update('dfi_fwdstop_hour',   dfi_fwdstop.hour,   'dfi_control')
    #namelist.update('dfi_fwdstop_minute', dfi_fwdstop.minute, 'dfi_control')
    #namelist.update('dfi_fwdstop_second', dfi_fwdstop.second, 'dfi_control')
   
    logger.debug('writing new settings to file')
    namelist.to_file(namelist_input)
    
    #logger.debug(namelist)
    logger.debug('*** FINISHED UPDATING namelist.input ***')  
Beispiel #53
0
def produce_ncl_ol_plots(config):
    """ Calls a series of ncl scripts to produce visualisations.
    
    Need to think about how to define a flexible visualisation framework
    Currently communication with NCL is via environment variables
    Perhaps in future we should move to PyNGL for easier (direct) integration
    as then we could simply pass in the config dictionary, use the same logging 
    framework, and make use of a vtable like mapping to forecast vars.
    
    However, for the time being we design each ncl script to expect 
    certain environment variables.  Then, the list of ncl scripts to 
    run can simply be specified somewhere in the config file
    e.g. wrf_basic_plots.ncl, wrf_vertical_plots.ncl etc.
    
    Updated: Some plots are much easier to produce using the original 
    wrfout netcdf files, rather than the UPP post-processed grib files. 
    However, in future we should stick with one or the other.
    
    
    Arguments:
    config -- dictionary containing various configuration options """
    
    logger = shared.get_logger()    
    logger.info('*** RUNNING NCL SCRIPTS ***')
     
    working_dir  = config['working_dir']

    ncl_code_dir   = config['ncl_code_dir']
    ncl_files      = config['ncl_ol_code']
    #ncl_code       = ['%s/%s' % (ncl_code_dir, f) for f in ncl_files]
    ncl_code       = ncl_files
    ncl_log        = config['ncl_log']
    wrftools_dir   = config['wrftools_dir']
    wrfout_dir     = config['wrfout_dir']
    init_time      = config['init_time']
    dom            = config['dom']
    fcst_file      = '%s/wrfout_d%02d_%s:00:00.nc' %(wrfout_dir, dom, init_time.strftime("%Y-%m-%d_%H"))
    ncl_out_dir    = shared.sub_date(config['ncl_ol_out_dir'], init_time=init_time)
    ncl_out_type   = config['ncl_out_type']
    nest_id        =  '%02d' % dom
    

    ncl_in_file    = fcst_file
    ncl_loc_file   = config['locations_file']
    ncl_out_dir    = shared.sub_date(config['ncl_out_dir'], init_time=init_time)
    ncl_out_type   = config['ncl_out_type']


    if not os.path.exists(ncl_out_dir):
        os.makedirs(ncl_out_dir)

    #
    # Communicate to NCL via environment variables
    # NCL expects the following to be set
    #File    = getenv("FCST_FILE")
    #type    = getenv("NCL_OUT_TYPE")
    #diro    = getenv("NCL_OUT_DIR")
    #;web_dir = getenv("WEB_DIR")
    #domain  = getenv("NEST_ID")    
    #run_hour = getenv("RUN_HOUR")

    #
    # Try escaping : in fcst_file
    #
    #fcst_file = fcst_file.replace(':', r'\:')
    #os.environ['FCST_FILE']      = fcst_file
    #os.environ['NCL_OUT_DIR']    = ncl_out_dir
    #os.environ['NCL_OUT_TYPE']   = ncl_out_type
    #os.environ['NEST_ID']        = nest_id
    #os.environ['DOMAIN']         = domain
    #os.environ['MODEL_RUN']      = model_run



    #logger.debug('Setting environment variables')
    logger.debug('FCST_FILE    ----> %s' % fcst_file)
    logger.debug('NCL_OUT_DIR  ----> %s' % ncl_out_dir)
    logger.debug('NCL_OUT_TYPE ----> %s' % ncl_out_type)
    logger.debug('NEST_ID      ----> %s' % nest_id)
    logger.debug('PATH')
    logger.debug(os.environ['PATH'])
    #logger.debug('DOMAIN       ----> %s' % domain)
    #logger.debug('MODEL_RUN    ----> %s' % model_run)


    for script in ncl_code:
        #cmd  = "ncl %s >> %s 2>&1" % (script, ncl_log)
        cmd  = "ncl %s " % script
        #qcmd = 'qrsh -cwd -l mem_total=36G "%s"' % cmd
        
        logger.warn("NCL to produce GEOTIFFS does not work on post-processing queue, runnign on head node")

        

        cmd  = """ncl 'ncl_in_file="%s"' 'ncl_out_dir="%s"' 'ncl_out_type="%s"' 'ncl_loc_file="%s"' %s 2>&1 >> %s/ncl.log""" % (ncl_in_file,ncl_out_dir, ncl_out_type, ncl_loc_file, script, working_dir)

        ret = shared.run_cmd(cmd, config)
        
        
        gwarp = config['gwarp']
        os.chdir(ncl_out_dir)
        
        cmd = "%s %s/*.tiff" %(gwarp, ncl_out_dir)
        logger.debug(cmd)
        shared.run_cmd(cmd, config)
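# Variables reach the NCL scripts above as command-line assignments of the form
# ncl 'name="value"' script.ncl. A sketch of assembling that command string;
# quoting is POSIX-shell oriented and assumes values contain no single quotes,
# and the helper name is an assumption added for illustration.
def build_ncl_command(script, str_vars):
    """Build an ncl invocation that passes string variables on the command line."""
    assignments = ' '.join("'%s=\"%s\"'" % (k, v) for k, v in sorted(str_vars.items()))
    return 'ncl %s %s' % (assignments, script)

# e.g. build_ncl_command('wrf_timeseries.ncl',
#                        {'ncl_in_file': 'wrfout_d01.nc', 'ncl_out_dir': './plots'})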
Beispiel #54
0
def prepare_wps(config):
    """ Runs all the pre-processing steps necessary for running WPS.
    
    Reads the current value of init_time from config, and links 
    boundary condition files into correct directory. Creates an output
    directory for the met_em files.
    
    Arguments:
    config -- dictionary containing various configuration options"""

    logger = shared.get_logger()
    logger.debug('*** PREPARING FILES FOR WPS ***')

    wps_dir = config['wps_dir']  # the base installation of WPS
    wps_run_dir = config['wps_run_dir']  # the directory to run WPS from
    working_dir = config['working_dir']  # model run directory
    met_em_dir = config['met_em_dir']
    init_time = config['init_time']

    grb_input_fmt = config['grb_input_fmt']
    vtable = config['vtable']
    bdy_times = shared.get_bdy_times(config)

    if isinstance(grb_input_fmt, dict):
        logger.debug(grb_input_fmt)
        fmts = grb_input_fmt.values()

    else:
        fmts = [grb_input_fmt]

    for fmt in fmts:
        #
        # Generate filelist based on the initial time, and the forecast hour
        #
        filelist = shared.get_bdy_filenames(fmt, bdy_times)

        #
        # Check the boundary files exist
        #
        logger.debug('checking boundary condition files exists')
        for f in filelist:
            if not os.path.exists(f):
                raise IOError('cannot find file: %s' % f)

    logger.debug('all boundary conditions files exist')

    #
    # Run the link_grib script to link the FNL files
    #
    logger.debug(
        'running link_grib.csh script to link grib files to GRIBFILE.AAA etc')
    os.chdir(wps_run_dir)
    args = ' '.join(filelist)
    cmd = '%s/link_grib.csh %s' % (wps_run_dir, args)
    shared.run(cmd, config)

    logger.debug('Path for met_em files is %s' % met_em_dir)
    if not os.path.exists(met_em_dir):
        os.makedirs(met_em_dir)

    logger.debug('*** FINISHED PREPARING FILES FOR WPS ***')
Beispiel #55
0
def run_unipost(config):
    """ Runs the Universal Post Processor for each forecast time. 
    Translated from the run_unipost_frames shell script. A post-processing
    directory should exist, specified by the post_dir entry in config, and
    the UPP control file wrf_cntrl should exist within this directory.
    
    TODO: tidy up some of the hangovers from the shell script version
    
    Arguments:
    config -- dictionary containing various configuration options
    
    """
    logger = shared.get_logger()    
    logger.info('*** RUNNING UNIVERSAL POST PROCESSOR ***')
    
    domain_dir    = config['domain_dir']
    max_dom       = config['max_dom']
    dom           = config['dom'] # current domain number
    model_run     = config['model_run']
    wrfout_dir    = '%s/%s/wrfout' %(domain_dir, model_run)    


    post_dir      = '%s/%s/postprd' % (domain_dir, model_run)
    wrf_cntrl     = post_dir+'/wrf_cntrl.parm'
    upp_dir       = config['upp_dir']
    wrf_working_dir   = config['wrf_dir']+'/run'
    namelist      = read_namelist(wrf_working_dir+'/namelist.input')

    fcst_times    = get_fcst_times(config)    
    init_time     = fcst_times[0]
    history_interval = config['history_interval']
    grb_fmt       = config['grb_fmt']


    #----CREATE DIRECTORIES-----------------------------------------------
    # Create archive directories to store data and settings
    #---------------------------------------------------------------------


    wrfpost_dir    = '%s/%s/wrfpost' %(domain_dir,model_run)

    if not os.path.exists(wrfpost_dir):
        os.makedirs(wrfpost_dir)

    #----PREPARATION-------------------------------------------------------
    # Link all the relevant files need to compute various diagnostics
    #---------------------------------------------------------------------
    
    #
    # Everything is done within the postprd directory
    #
    logger.debug('Going into postprd directory: %s' %post_dir)
    #os.chdir(post_dir)

    #
    # Clean up old output files
    #
    #logger.debug('Removing old output files')
    cmd = 'rm -f %s/*.out' % post_dir
    shared.run_cmd(cmd, config)
    cmd = 'rm -f %s/*.tm00' % post_dir
    shared.run_cmd(cmd, config)
    
    
    # Link Ferrier's microphysics table and the Unipost control file
    cmd = 'ln -sf %s/ETAMPNEW_DATA ./eta_micro_lookup.dat' % wrf_working_dir
    shared.run_cmd(cmd, config)
    
    #
    # Get local copy of parm file
    # no - let's force the user to manually ensure a copy is placed
    # in the postprd directory first
    # os.system('ln -sf ../parm/wrf_cntrl.parm .')
    
    #
    # Check wrf_cntrl file exists
    #
    if not os.path.exists(wrf_cntrl):
        raise IOError('could not find control file: %s'% wrf_cntrl)
    
    
    #
    # link coefficients for crtm2 (simulated GOES)
    # Jeez - these should really get called via run_cmd for 
    # consistency, but I can't be chewed right now
    #
    CRTMDIR  = upp_dir+'/src/lib/crtm2/coefficients'
    crtm_coeffs = [
        'EmisCoeff/Big_Endian/EmisCoeff.bin',
        'AerosolCoeff/Big_Endian/AerosolCoeff.bin',
        'CloudCoeff/Big_Endian/CloudCoeff.bin',
        'SpcCoeff/Big_Endian/imgr_g12.SpcCoeff.bin',
        'TauCoeff/Big_Endian/imgr_g12.TauCoeff.bin',
        'SpcCoeff/Big_Endian/imgr_g11.SpcCoeff.bin',
        'TauCoeff/Big_Endian/imgr_g11.TauCoeff.bin',
        'SpcCoeff/Big_Endian/amsre_aqua.SpcCoeff.bin',
        'TauCoeff/Big_Endian/amsre_aqua.TauCoeff.bin',
        'SpcCoeff/Big_Endian/tmi_trmm.SpcCoeff.bin',
        'TauCoeff/Big_Endian/tmi_trmm.TauCoeff.bin',
        'SpcCoeff/Big_Endian/ssmi_f15.SpcCoeff.bin',
        'TauCoeff/Big_Endian/ssmi_f15.TauCoeff.bin',
        'SpcCoeff/Big_Endian/ssmis_f20.SpcCoeff.bin',
        'TauCoeff/Big_Endian/ssmis_f20.TauCoeff.bin',
    ]
    # link each coefficient file into the current directory
    for coeff in crtm_coeffs:
        os.system('ln -fs %s/%s ./' % (CRTMDIR, coeff))
    
    os.putenv('MP_SHARED_MEMORY', 'yes')
    os.putenv('MP_LABELIO', 'yes')
    os.putenv('tmmark', 'tm00')
    
    
    #
    # Run unipost for each time in the output file
    # Note that unipost names the intermediate files
    # WRFPRShhh.tm00 where hhh is the forecast hour
    #
    for n,t in enumerate(fcst_times):
    
        current_time = t.strftime('%Y-%m-%d_%H:%M:%S')
        fhr          = n*history_interval/60
        #logger.debug('post processing time %s, fhr %d ' % (current_time, fhr))

        #
        # Assume the forecast is contained in one wrfout file 
        # named according to the forecast initial time
        #
        wrfout = '%s/wrfout_d%02d_%s' %(wrfout_dir,dom, init_time.strftime('%Y-%m-%d_%H:%M:%S'))
        #logger.debug('looking for file: %s' % wrfout)
        
        #--- itag file --------------------------------------------------------
        #   Create input file for Unipost
        #   First line is where your wrfout data is
        #   Second line is the format
        #   Third line is the time for this process file
        #   Fourth line is a tag identifying the model (WRF, GFS etc)
        #----------------------------------------------------------------------
        #logger.debug('writing itag file')
        #logger.debug('time in itag file: %s' %current_time)
        itag = open('itag', 'w')
        itag.write('%s\n'%wrfout)
        itag.write('netcdf\n')
        itag.write('%s\n'%current_time)
        itag.write('NCAR\n')
        itag.close()
        
        #-----------------------------------------------------------------------
        #  Check wrf_cntrl.parm file exists
        #-----------------------------------------------------------------------            
        
        
        
        #-----------------------------------------------------------------------
        #   Run unipost.
        #-----------------------------------------------------------------------            
        os.system('rm -f fort.*')
        os.system('ln -sf wrf_cntrl.parm fort.14')
        os.system('ln -sf griddef.out fort.110')
        cmd = '%s/bin/unipost.exe < itag > unipost_d%02d.%s.out 2>&1' %(upp_dir, dom,current_time)
        shared.run_cmd(cmd, config)
        
        tmp_name = 'WRFPRS%03d.tm00' % fhr
        grb_name = 'wrfpost_d%02d_%s.tm00' %(dom,current_time)
        
        #
        # If keeping same format, just move output file
        #
        cmd = 'mv %s %s' %(tmp_name, grb_name)
        shared.run_cmd(cmd, config)
        
        #
        # Convert to grib2 format if required
        #            
        #if grb_fmt=='grib2':
        #    cmd = 'cnvgrib -g12 %s %s' %(tmp_name, grb_name) 
        #    shared.run_cmd(cmd, config)
            
    logger.debug('concatenating grib records into a single file for domain d%02d...' % dom)
    outname = 'wrfpost_d%02d_%s.grb'%(dom,init_time.strftime('%Y-%m-%d_%H'))
    cmd     = 'cat wrfpost_d%02d_*.tm00 > %s' %(dom, outname)
    shared.run_cmd(cmd, config)

    
    #-----------------------------------------------------------------------
    # Archive
    #-----------------------------------------------------------------------
    cmd = 'mv %s %s' %(outname, wrfpost_dir)

    ret = shared.run_cmd(cmd, config)
    
    if ret!=0:
        raise IOError('could not move post-processed output')
  
  
    logger.info("*** SUCESS UPP ***")
Beispiel #56
0
def ungrib_sst(config):
    """ Runs ungrib.exe for SST fields, makes and modifies a copy of namelist.wps,
    then restores the original namelist.wps"""
    logger = shared.get_logger()

    wps_dir = config['wps_dir']
    wps_run_dir = config['wps_run_dir']
    tmp_dir = config['tmp_dir']
    working_dir = config['working_dir']
    init_time = config['init_time']
    max_dom = config['max_dom']
    sst_local_dir = config['sst_local_dir']
    sst_time = shared.get_sst_time(config)
    sst_filename = shared.get_sst_filename(config)
    vtable_sst = config['sst_vtable']
    vtable = wps_run_dir + '/Vtable'
    queue = config['queue']
    log_file = '%s/ungrib.sst.log' % wps_run_dir
    namelist_wps = config['namelist_wps']
    namelist_sst = '%s/namelist.sst' % working_dir

    namelist = shared.read_namelist(namelist_wps)

    #
    # update one line to point to the new SST field
    # ungrib.exe will name SST field as e.g.
    # SST:2013-04-24_00
    #
    constants_name = '%s/SST:%s' % (wps_run_dir,
                                    sst_time.strftime('%Y-%m-%d_%H'))
    logger.debug('Updating constants_name ----> %s' % constants_name)
    namelist.update('constants_name', constants_name, section='metgrid')

    # Write the changes into the original
    namelist.to_file(namelist_wps)

    #
    # Update start and end time to process SST
    #
    start_str = sst_time.strftime("%Y-%m-%d_%H:%M:%S")
    end_str = sst_time.strftime("%Y-%m-%d_%H:%M:%S")
    logger.debug("Updating namelist.sst")
    logger.debug('PREFIX ------> SST')
    logger.debug('start_date---> ' + start_str)
    logger.debug('end_date-----> ' + end_str)

    namelist.update('prefix', 'SST')
    namelist.update('start_date', [start_str] * max_dom)
    namelist.update('end_date', [end_str] * max_dom)
    logger.debug('writing modified namelist.sst to file -------> %s' %
                 namelist_sst)
    namelist.to_file(namelist_sst)

    #remove any linked namelist.wps
    logger.debug('removing namelist.wps')
    namelist_run = '%s/namelist.wps' % wps_run_dir
    if os.path.exists(namelist_run):
        os.remove(namelist_run)

    # link namelist.sst to namelist.wps in WPS run dir
    logger.debug('linking namelist.sst -----> namelist.wps')
    cmd = 'ln -sf %s %s' % (namelist_sst, namelist_run)
    shared.run_cmd(cmd, config)

    logger.debug('removing Vtable')
    if os.path.exists(vtable):
        os.remove(vtable)
    logger.debug('linking Vtable.SST ----> Vtable')
    cmd = 'ln -sf %s %s' % (vtable_sst, vtable)
    shared.run_cmd(cmd, config)

    # run link_grib to link SST gribs files
    logger.debug('Linking SST GRIB files')
    cmd = '%s/link_grib.csh %s/%s' % (wps_dir, sst_local_dir, sst_filename)
    shared.run_cmd(cmd, config)

    logger.info('\n*** RUNNING UNGRIB FOR SST ***')
    cmd = '%s/ungrib.exe' % wps_run_dir
    shared.run_cmd(cmd, config)

    cmd = 'grep "Successful completion" ./ungrib.log*'  # check for success
    ret = shared.run_cmd(cmd, config)
    if ret != 0:
        raise IOError('Ungrib failed for SST')

    logger.info('*** SUCCESS UNGRIB SST ***\n')
    logger.debug('Removing namelist.wps')
    if os.path.exists(namelist_run):
        os.remove(namelist_run)
    # link in original (unmodified) namelist.wps
    cmd = 'ln -sf %s %s' % (namelist_wps, namelist_run)
    shared.run_cmd(cmd, config)
Beispiel #57
0
def produce_ncl_plots(config):
    """ Calls a series of ncl scripts to produce visualisations.
    
    Need to think about how to define a flexible visualisation framework
    Currently communication with NCL is via environment variables
    Perhaps in future we should move to PyNGL for easier (direct) integration
    as then we could simply pass in the config dictionary, use the same logging 
    framework, and make use of a vtable like mapping to forecast vars.
    
    However, for the time being we design each ncl script to expect 
    certain environment variables.  Then, the list of ncl scripts to 
    run can simply be specified somewhere in the config file
    e.g. wrf_basic_plots.ncl, wrf_vertical_plots.ncl etc.
    
    
    Arguments:
    config -- dictionary containing various configuration options """
    
    logger = shared.get_logger()    

     

    domain         = config['domain']
    model_run      = config['model_run']
    working_dir    = config['working_dir']
    ncl_code_dir   = config['ncl_code_dir']
    ncl_files      = config['ncl_code']
    ncl_code       =  ncl_files
    ncl_log        = config['ncl_log']
    wrfout_dir     = config['wrfout_dir']
    init_time      = config['init_time']
    dom            = config['dom']
    fcst_file      = '%s/wrfout_d%02d_%s:00:00.nc' %(wrfout_dir, dom, init_time.strftime("%Y-%m-%d_%H"))
    ncl_in_file    = fcst_file
    ncl_loc_file   = config['locations_file']
    ncl_out_dir    = shared.sub_date(config['ncl_out_dir'], init_time=init_time)
    ncl_out_type   = config['ncl_out_type']
    nest_id        =  '%02d' % dom
    ncl_opt_template = config['ncl_opt_template']
    ncl_opt_file = config['ncl_opt_file']
    extract_hgts = config['extract_hgts']
    logger.info('*** RUNNING NCL SCRIPTS FOR DOMAIN d%02d***' % dom)
    
    if not os.path.exists(ncl_out_dir):
        os.makedirs(ncl_out_dir)

    if not ncl_in_file.endswith('.nc'):
        ncl_in_file = ncl_in_file + '.nc' 

    ncl_hgts = '(/%s/)' % ','.join(map(str,extract_hgts))        
        
    replacements = {'<ncl_in_file>'  : ncl_in_file, 
                    '<ncl_out_dir>'  : ncl_out_dir, 
                    '<ncl_out_type>' : ncl_out_type,
                    '<ncl_loc_file>' : ncl_loc_file,
                    '<extract_heights>' : ncl_hgts} 
        

    fill_template(ncl_opt_template, ncl_opt_file, replacements)
        
    logger.debug('ncl_opt_template: %s' % ncl_opt_template)
    logger.debug('    ncl_in_file  ----> %s' % ncl_in_file)
    logger.debug('    ncl_out_dir  ----> %s' % ncl_out_dir)
    logger.debug('    ncl_out_type ----> %s' % ncl_out_type)
    logger.debug('    ncl_loc_file ----> %s' % ncl_loc_file)
    logger.debug('ncl_opt_file: %s' % ncl_opt_file)

    for script in ncl_code:
        #
        # mem_total forces the use of the post-processing node
        #
        #cmd  = "ncl %s >> %s 2>&1" % (script, ncl_log)
        #qcmd = 'qrsh -cwd -l mem_total=36G "%s"' % cmd
        logger.debug(script)
        
        queue = config['queue']
        cmd = "ncl %s" % script
        
        
        #if queue['ncl']:
            # works on Schumi
        
            #cmd  = """ncl ncl_in_file="%s" ncl_out_dir="%s" ncl_out_type="%s" ncl_loc_file="%s" %s""" % (ncl_in_file,ncl_out_dir, ncl_out_type, ncl_loc_file, script)
            # works on maestro
            #cmd  = """ncl 'ncl_in_file="%s"' 'ncl_out_dir="%s"' 'ncl_out_type="%s"' 'ncl_loc_file="%s"' %s """ % (ncl_in_file,ncl_out_dir, ncl_out_type, ncl_loc_file, script)
        #else:
            #cmd  = """ncl 'ncl_in_file="%s"' 'ncl_out_dir="%s"' 'ncl_out_type="%s"' 'ncl_loc_file="%s"' %s 2>&1 >> %s/ncl.log""" % (ncl_in_file,ncl_out_dir, ncl_out_type, ncl_loc_file, script, working_dir)
        env_vars = {'NCL_OPT_FILE': ncl_opt_file}
        ret = shared.run(cmd, config, env_vars=env_vars)
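# produce_ncl_plots relies on fill_template to turn ncl_opt_template into
# ncl_opt_file by substituting <placeholder> tokens. Its real implementation is
# not shown in these examples, so the following is only a hypothetical stand-in
# illustrating the substitution step.
def fill_template(template_path, out_path, replacements):
    """Read a template, replace each placeholder token with its value, and write the result."""
    with open(template_path) as f:
        text = f.read()
    for token, value in replacements.items():
        text = text.replace(token, str(value))
    with open(out_path, 'w') as f:
        f.write(text)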
Beispiel #58
0
            bit_generator = "Wrapper32"
        streams[key] = TEMPLATE.render(
            streams=num_streams,
            kwargs=kwargs_repr,
            bit_generator=bit_generator,
            output=OUTPUT[bitgen],
            extra_initialization=extra_initialization,
            sequential=sequential,
        )
    return streams


if __name__ == "__main__":
    import argparse

    logger = get_logger("prng-tester")

    parser = argparse.ArgumentParser(
        description="Test alternative with bad seed values",
        formatter_class=argparse.ArgumentDefaultsHelpFormatter,
    )
    parser.add_argument(
        "-mt",
        "--multithreaded",
        action="store_true",
        help="Pass the --multithreaded flag in PractRand's RNG_Test.",
    )
    parser.add_argument(
        "-rt",
        "--run-tests",
        action="store_true",