Example #1
0
def prepare_wps(config):
    """ Runs all the pre-processing steps necessary for running WPS.

    Reads the current value of init_time from config, verifies that all
    boundary-condition files exist, links them into the WPS run directory
    via link_grib.csh, and creates an output directory for the met_em files.

    Arguments:
    config -- dictionary containing various configuration options

    Raises:
    IOError -- if any expected boundary-condition file is missing"""

    logger = shared.get_logger()
    logger.debug('*** PREPARING FILES FOR WPS ***')

    wps_run_dir = config['wps_run_dir']  # the directory to run WPS from
    met_em_dir = config['met_em_dir']    # output directory for met_em files

    grb_input_fmt = config['grb_input_fmt']
    bdy_times = shared.get_bdy_times(config)

    # grb_input_fmt is either a single filename format string, or a
    # mapping of boundary-condition source -> format string.
    if isinstance(grb_input_fmt, dict):
        logger.debug(grb_input_fmt)
        fmts = grb_input_fmt.values()
    else:
        fmts = [grb_input_fmt]

    # BUG FIX: previously only the filelist built on the *last* loop
    # iteration survived, so with multiple input formats the earlier
    # formats' files were checked but never linked.  Accumulate them all.
    all_files = []
    for fmt in fmts:
        #
        # Generate filelist based on the initial time, and the forecast hour
        #
        filelist = shared.get_bdy_filenames(fmt, bdy_times)

        #
        # Check the boundary files exist
        #
        logger.debug('checking boundary condition files exists')
        for f in filelist:
            if not os.path.exists(f):
                raise IOError('cannot find file: %s' % f)
        all_files.extend(filelist)

    logger.debug('all boundary conditions files exist')

    #
    # Run the link_grib script to link the boundary files
    #
    logger.debug(
        'running link_grib.csh script to link grib files to GRIBFILE.AAA etc')
    os.chdir(wps_run_dir)
    args = ' '.join(all_files)
    cmd = '%s/link_grib.csh %s' % (wps_run_dir, args)
    shared.run(cmd, config)

    logger.debug('Path for met_em files is %s' % met_em_dir)
    if not os.path.exists(met_em_dir):
        os.makedirs(met_em_dir)

    logger.debug('*** FINISHED PREPARING FILES FOR WPS ***')
Example #2
0
def prepare_wps(config):
    """ Runs all the pre-processing steps necessary for running WPS.

    Reads the current value of init_time from config, verifies that all
    boundary-condition files exist, links them into the WPS run directory
    via link_grib.csh, and creates an output directory for the met_em files.

    Arguments:
    config -- dictionary containing various configuration options

    Raises:
    IOError -- if any expected boundary-condition file is missing"""

    logger = shared.get_logger()
    logger.debug('*** PREPARING FILES FOR WPS ***')

    wps_run_dir = config['wps_run_dir']  # the directory to run WPS from
    met_em_dir = config['met_em_dir']    # output directory for met_em files

    grb_input_fmt = config['grb_input_fmt']
    bdy_times = shared.get_bdy_times(config)

    # grb_input_fmt is either one filename format string, or a mapping of
    # boundary-condition source -> format string.
    if isinstance(grb_input_fmt, dict):
        logger.debug(grb_input_fmt)
        fmts = grb_input_fmt.values()
    else:
        fmts = [grb_input_fmt]

    # BUG FIX: the original code used `filelist` after this loop, so only
    # the last format's files were ever linked when several formats were
    # configured.  Collect every format's files instead.
    linked_files = []
    for fmt in fmts:
        # Generate filelist based on the initial time, and the forecast hour
        filelist = shared.get_bdy_filenames(fmt, bdy_times)

        # Check the boundary files exist
        logger.debug('checking boundary condition files exists')
        for f in filelist:
            if not os.path.exists(f):
                raise IOError('cannot find file: %s' % f)
        linked_files.extend(filelist)

    logger.debug('all boundary conditions files exist')

    # Run the link_grib script to link the boundary files
    logger.debug('running link_grib.csh script to link grib files to GRIBFILE.AAA etc')
    os.chdir(wps_run_dir)
    args = ' '.join(linked_files)
    cmd = '%s/link_grib.csh %s' % (wps_run_dir, args)
    shared.run(cmd, config)

    logger.debug('Path for met_em files is %s' % met_em_dir)
    if not os.path.exists(met_em_dir):
        os.makedirs(met_em_dir)

    logger.debug('*** FINISHED PREPARING FILES FOR WPS ***')
Example #3
0
def _ungrib_filelist(fmt, bdy_times, grb_input_delay, key, logger):
    """Build the de-duplicated boundary filename list for one input source.

    If a delay (in hours) is configured for this source, the boundary
    times are shifted back by that amount before the filenames are built.
    OrderedDict.fromkeys removes duplicates while preserving order.
    """
    if grb_input_delay and key in grb_input_delay:
        logger.debug("applying delay")
        # delay is given in hours; timedelta takes (days, seconds)
        delay = datetime.timedelta(0, grb_input_delay[key] * 60 * 60)
        times = [b - delay for b in bdy_times]
    else:
        logger.debug("no delay applied")
        times = bdy_times
    return list(OrderedDict.fromkeys(shared.get_bdy_filenames(fmt, times)))


def run_ungrib(config):
    """ Runs ungrib.exe and checks output was successful.
    If vtable and grb_input_fmt are NOT dictionaries,
    then dictionaries will be constructed from them using
    the key bdy_conditions from the metadata.

    Arguments:
    config -- dictionary specifying configuration options

    Raises:
    IOError -- if a boundary file is missing, or if ungrib.log does not
               report successful completion
    """
    logger = shared.get_logger()
    wps_run_dir = config['wps_run_dir']
    namelist_wps = config['namelist_wps']
    vtable = config['vtable']
    grb_input_fmt = config['grb_input_fmt']
    grb_input_delay = config.get(
        "grb_input_delay")  # this allows None to be returned

    bdy_conditions = config['bdy_conditions']

    logger.info("\n*** RUNNING UNGRIB ***")

    namelist = shared.read_namelist(namelist_wps)

    bdy_times = shared.get_bdy_times(config)

    # Normalise scalar settings into per-source dictionaries keyed by
    # bdy_conditions, so the loops below handle both cases uniformly.
    if not isinstance(grb_input_fmt, dict):
        grb_input_fmt = {bdy_conditions: grb_input_fmt}

    if not isinstance(vtable, dict):
        vtable = {bdy_conditions: vtable}

    #
    # First pass: check that all boundary conditions exist before
    # running anything.
    #
    for key in vtable.keys():
        filelist = _ungrib_filelist(
            grb_input_fmt[key], bdy_times, grb_input_delay, key, logger)

        logger.debug('checking boundary condition files exists')
        for f in filelist:
            if not os.path.exists(f):
                raise IOError('cannot find file: %s' % f)

    logger.debug('all boundary conditions files exist')

    #
    # Second pass: for each source, link the grib files, point the
    # namelist prefix and Vtable at this source, then run ungrib.exe.
    #
    for key in vtable.keys():

        filelist = _ungrib_filelist(
            grb_input_fmt[key], bdy_times, grb_input_delay, key, logger)

        logger.debug(
            'running link_grib.csh script to link grib files to GRIBFILE.AAA etc'
        )

        os.chdir(wps_run_dir)
        args = ' '.join(filelist)
        cmd = '%s/link_grib.csh %s' % (wps_run_dir, args)
        shared.run_cmd(cmd, config)

        namelist.update('prefix', key)
        namelist.to_file(namelist_wps)
        link_namelist_wps(config)

        # Replace any existing Vtable symlink with this source's Vtable
        vtab_wps = wps_run_dir + '/Vtable'
        if os.path.exists(vtab_wps):
            os.remove(vtab_wps)
        cmd = 'ln -sf %s %s' % (vtable[key], vtab_wps)
        logger.debug(cmd)
        subprocess.call(cmd, shell=True)

        cmd = '%s/ungrib.exe' % wps_run_dir
        logger.debug(cmd)
        shared.run(cmd, config, wps_run_dir)

        # Success is determined by grepping the log for the completion
        # message rather than relying on the command's own status.
        cmd = 'grep "Successful completion" %s/ungrib.log*' % wps_run_dir  # check for success
        ret = shared.run_cmd(cmd, config)
        if ret != 0:
            raise IOError('ungrib.exe did not complete')

    logger.info('*** SUCESS UNGRIB ***\n')
Example #4
0
def run_ungrib(config):
    """ Runs ungrib.exe and checks output was successful.
    If vtable and grb_input_fmt are NOT dictionaries,
    then dictionaries will be constructed from them using
    the key bdy_conditions from the metadata.

    Arguments:
    config -- dictionary specifying configuration options

    Raises:
    IOError -- if a boundary file is missing, or if ungrib.log does not
               report successful completion
    """
    logger = shared.get_logger()
    wps_run_dir = config['wps_run_dir']
    namelist_wps = config['namelist_wps']
    vtable = config['vtable']
    grb_input_fmt = config['grb_input_fmt']
    grb_input_delay = config.get("grb_input_delay")  # this allows None to be returned

    bdy_conditions = config['bdy_conditions']

    logger.info("\n*** RUNNING UNGRIB ***")

    namelist = shared.read_namelist(namelist_wps)

    bdy_times = shared.get_bdy_times(config)

    # Normalise scalar settings into per-source dictionaries keyed by
    # bdy_conditions, so the loops below handle both cases uniformly.
    if not isinstance(grb_input_fmt, dict):
        grb_input_fmt = {bdy_conditions: grb_input_fmt}

    if not isinstance(vtable, dict):
        vtable = {bdy_conditions: vtable}

    def bdy_filelist(key):
        """De-duplicated, delay-shifted boundary filenames for one source."""
        if grb_input_delay and key in grb_input_delay:
            logger.debug("applying delay")
            # delay is given in hours; timedelta takes (days, seconds)
            delay = datetime.timedelta(0, grb_input_delay[key] * 60 * 60)
            times = [b - delay for b in bdy_times]
        else:
            logger.debug("no delay applied")
            times = bdy_times
        # OrderedDict.fromkeys de-duplicates while preserving order
        return list(OrderedDict.fromkeys(
            shared.get_bdy_filenames(grb_input_fmt[key], times)))

    #
    # First pass: check that all boundary conditions exist before
    # running anything.
    #
    for key in vtable.keys():

        logger.debug('checking boundary condition files exists')
        for f in bdy_filelist(key):
            if not os.path.exists(f):
                raise IOError('cannot find file: %s' % f)

    logger.debug('all boundary conditions files exist')

    #
    # Second pass: for each source, link the grib files, point the
    # namelist prefix and Vtable at this source, then run ungrib.exe.
    #
    for key in vtable.keys():

        filelist = bdy_filelist(key)

        logger.debug('running link_grib.csh script to link grib files to GRIBFILE.AAA etc')

        os.chdir(wps_run_dir)
        args = ' '.join(filelist)
        cmd = '%s/link_grib.csh %s' % (wps_run_dir, args)
        shared.run_cmd(cmd, config)

        namelist.update('prefix', key)
        namelist.to_file(namelist_wps)
        link_namelist_wps(config)

        # Replace any existing Vtable symlink with this source's Vtable
        vtab_wps = wps_run_dir + '/Vtable'
        if os.path.exists(vtab_wps):
            os.remove(vtab_wps)
        cmd = 'ln -sf %s %s' % (vtable[key], vtab_wps)
        logger.debug(cmd)
        subprocess.call(cmd, shell=True)

        cmd = '%s/ungrib.exe' % wps_run_dir
        logger.debug(cmd)
        shared.run(cmd, config, wps_run_dir)

        # Success is determined by grepping the log for the completion
        # message rather than relying on the command's own status.
        cmd = 'grep "Successful completion" %s/ungrib.log*' % wps_run_dir  # check for success
        ret = shared.run_cmd(cmd, config)
        if ret != 0:
            raise IOError('ungrib.exe did not complete')

    logger.info('*** SUCESS UNGRIB ***\n')