def main():
    """Run GSI data assimilation for the domain named in $GSI_DOMAIN.

    $GSI_DOMAIN must be "d02" or "d03".  Exits early (status 0) if GSI
    is disabled in the configuration or in the GSI status file.
    """
    gsi_domain = os.environ['GSI_DOMAIN'].lower()
    if gsi_domain not in ('d02', 'd03'):
        fail('Aborting: gsi_domain="%s" must be "d02" or "d03"' %
             (gsi_domain, ))

    import hwrf_expt
    hwrf_expt.init_module()
    logger = hwrf_expt.conf.log('exhwrf_gsi')

    if not hwrf_expt.conf.getbool('config', 'run_gsi'):
        jlogger.info('GSI is disabled.  This job need not be run.')
        sys.exit(0)

    # Certain clusters need extra environment variables set before GSI runs:
    if produtil.cluster.name() in ['gyre', 'tide']:
        hwrf_wcoss.set_vars_for_gsi(logger)
    else:
        logger.info('Not on WCOSS, so not setting WCOSS-specific vars.')

    if hwrf.gsi.get_gsistatus(hwrf_expt.conf, 'gsi_' + gsi_domain, logger):
        logger.info('GSI is enabled for %s.' % (gsi_domain, ))
    else:
        jlogger.info('GSI is disabled for %s.  This job need not be run.' %
                     (gsi_domain, ))
        sys.exit(0)

    # Dispatch to the per-domain GSI task:
    task = hwrf_expt.gsi_d02 if gsi_domain == 'd02' else hwrf_expt.gsi_d03
    task.run()
def main():
    """Archive the forecast job's FV3 netCDF output to HPSS.

    Reads the [archive] section's fv3out option for the target archive
    path, which must begin with "hpss:".  Runs "hsi mkdir -p" to create
    the destination directory and "htar" to archive all *.nc files from
    the cycle's forecast directory.  Exits 0 (nothing to do) when no
    fv3out option is configured; exits 1 on a malformed archive path.
    """
    environ_CONFhafs = os.environ.get('CONFhafs', 'NO_CONFhafs')
    conf = hafs.launcher.HAFSLauncher().read(environ_CONFhafs)
    if not conf.getstr('archive', 'fv3out', ''):
        jlogger.info(
            'No fv3out option in [archive] section.  Will not make fv3out archive.'
        )
        sys.exit(0)

    logger = conf.log()

    files = list()
    WORKhafs = conf.getdir('WORKhafs', '/can/not/find/WORKhafs/dir')
    forecastdir = os.path.join(WORKhafs, 'forecast')
    with produtil.cd.NamedDir(forecastdir):
        # BUGFIX: use extend(), not append().  append() inserted the entire
        # glob result as one nested list, so htar below received a single
        # list-valued argument instead of one argument per file.
        files.extend(glob.glob('*.nc'))
        thearchive = conf.timestrinterp('archive', '{fv3out}', 0)
        if thearchive[0:5] != 'hpss:':
            logger.error('The fv3out archive path must begin with "hpss:": ' +
                         thearchive)
            sys.exit(1)
        thearchive = thearchive[5:]  # strip the "hpss:" prefix
        adir = os.path.dirname(thearchive)
        mkdir = batchexe(conf.getexe('hsi'))['-P', 'mkdir', '-p', adir]
        run(mkdir, logger=logger)
        cmd = batchexe(conf.getexe('htar'))['-vcpf', thearchive][files]
        checkrun(cmd, logger=logger)
Exemple #3
0
def main():
    """Verify that all ENSDA member products were delivered to COM.

    Checks every product of every ensemble member and logs a warning for
    each missing, unavailable, undelivered, or empty product.  Exits with
    status 1 if anything is wrong; otherwise writes the cycle's "done
    file" so the workflow layer knows the cycle is complete.
    """
    import hwrf_expt
    hwrf_expt.init_module(make_ensemble_da=True)

    conf = hwrf_expt.conf
    run_ensemble_da = conf.getbool('config', 'run_ensemble_da')
    ensda_flag_file = conf.getstr('tdrcheck', 'tdr_flag_file')
    run_ensda = read_ensda_flag_file(ensda_flag_file)
    if run_ensemble_da and run_ensda:
        ensda_size = conf.getint('hwrf_da_ens', 'ensda_size')
    else:
        jlogger.info('ENSDA was not run.')
        # Zero members means the product-checking loop below is skipped
        # and the donefile is written unconditionally.
        ensda_size = 0

    logger = conf.log('output')

    bad = False
    for ens in xrange(ensda_size):
        imemb = ens + 1  # members are numbered from 1, not 0
        omemb = hwrf_expt.ensda.member(hwrf_expt.conf.cycle, imemb)
        for prod in omemb.products():
            if not prod.location:
                logger.warning('ensda %03d: No product: %s' % (
                    imemb,
                    prod.did,
                ))
                bad = True
            elif not prod.available:
                logger.warning(
                    'ensda %03d: product %s not available (location %s)' %
                    (imemb, repr(prod.did), repr(prod.location)))
                bad = True
            else:
                # Expected delivered path in COM:
                # <com>/<out_prefix>.ensda_NNN.<product basename>
                dest = '%s/%s.ensda_%03d.%s' % (
                    hwrf_expt.conf.getdir('com'),
                    hwrf_expt.conf.getstr('config', 'out_prefix'), imemb,
                    os.path.basename(prod.location))
                if not os.path.exists(dest):
                    logger.warning('ensda %03d: %s: does not exist' % (
                        imemb,
                        dest,
                    ))
                    bad = True
                elif os.path.getsize(dest) < 1:
                    logger.warning('ensda %03d: %s: is empty' % (imemb, dest))
                    bad = True
                else:
                    logger.info('ensda %03d: %s exists and is non-empty.' %
                                (imemb, dest))

    if bad:
        logger.critical(
            'HWRF data assimilation ensemble products are missing.')
        sys.exit(1)

    jlogger.info('Creating donefile.')
    donefile = os.path.join(conf.strinterp('config',
                                           '{com}/{stormlabel}.done'))
    with open(donefile, 'wt') as f:
        f.write('Cycle is complete.')
def main():
    """Run the POM ocean initialization, maintaining the ocean status file.

    The ocean status file records whether the coupled ocean should be
    used this cycle.  It is cleared first, then set to True on success,
    or False when the ocean is disabled, the basin is unsupported, or
    (when allow_fallbacks is set) initialization fails.  Any other
    failure re-raises and aborts the job.
    """
    import hwrf_expt
    hwrf_expt.init_module()
    conf = hwrf_expt.conf
    logger = conf.log('exhwrf_ocean_init')
    # Clear any stale status from a prior attempt before deciding anew:
    hwrf.mpipomtc.unset_ocstatus(conf, logger)
    try:
        if not conf.getbool('config', 'run_ocean'):
            jlogger.info('Ocean is disabled.  This job need not be run.')
            hwrf.mpipomtc.set_ocstatus(conf, False, logger)
            return
        hwrf_expt.pominit.run()
        hwrf.mpipomtc.set_ocstatus(conf, True, logger)
    except pom.exceptions.POMUnsupportedBasin as e:
        # Not a fatal error: run the forecast uncoupled for this basin.
        produtil.log.postmsg('Unsupported basin: will run without ocean.')
        hwrf.mpipomtc.set_ocstatus(conf, False, logger)
        return
    except Exception as e:
        if conf.getbool('config', 'allow_fallbacks', False):
            # Fall back to an uncoupled run rather than aborting the cycle:
            logger.error('Could not run ocean init: will run without ocean.'
                         '  Unhandled exception: ' + str(e),
                         exc_info=True)
            hwrf.mpipomtc.set_ocstatus(conf, False, logger)
            return
        raise
def products():
    """Run the NHC products task inside the cycle's work directory."""
    conf = hwrf_expt.conf
    jlogger.info(conf.strinterp(
        'config',
        '{stormlabel}: starting nhc_products job for {out_prefix}'))
    with NamedDir(hwrf_expt.WORKhwrf, logger=logging.getLogger()) as t:
        hwrf_expt.nhcp.run()
    jlogger.info(conf.strinterp(
        'config',
        '{stormlabel}: completed nhc_products job for {out_prefix}'))
def main():
    """Run ENSDA pre-processing if enabled; otherwise write a "no" flag file."""
    import hwrf_expt
    hwrf_expt.init_module(make_ensemble_da=True)

    if hwrf_expt.conf.getbool('config', 'run_ensemble_da'):
        hwrf_expt.ensda_pre.run()
    else:
        jlogger.info('ENSDA is disabled for this configuration.  '
                     'This job need not be run.')
        hwrf_expt.ensda_pre.write_flag_file(False)
def post():
    """Run the post-processors repeatedly until all output is posted.

    Loops over the wrfcopier (optional), non-satellite post, and
    satellite post (optional) tasks, calling runpart() on whichever are
    incomplete, until every enabled task reports done().  Sleeps between
    iterations so fast (likely no-op) passes do not hammer the system.
    """
    produtil.setup.setup()
    jlogger.info('starting post')
    import hwrf_expt
    hwrf_expt.init_module()

    run_copier = hwrf_expt.conf.getbool('config', 'post_runs_wrfcopier', False)
    run_satpost = hwrf_expt.conf.getbool('config', 'run_satpost', True)

    # Make sure we check all tasks to see if they're posted:
    hwrf_expt.nonsatpost.state = UNSTARTED
    hwrf_expt.satpost.state = UNSTARTED

    if run_copier:
        hwrf_expt.wrfcopier.state = UNSTARTED

    logger = logging.getLogger('exhwrf_post')

    # Change to a temp directory to run the  post:
    with NamedDir(hwrf_expt.WORKhwrf, logger=logger) as t:
        #hwrf_expt.ds.dump() # dump entire database state to stdout
        alldone = False
        while not alldone:
            before = int(time.time())
            if run_copier:
                if not done(hwrf_expt.wrfcopier):
                    hwrf_expt.wrfcopier.runpart()
            # NOTE: nonsatpost is deliberately given several runpart()
            # chances per iteration, interleaved with the other tasks.
            if not done(hwrf_expt.nonsatpost): hwrf_expt.nonsatpost.runpart()
            if not done(hwrf_expt.nonsatpost): hwrf_expt.nonsatpost.runpart()
            if run_satpost:
                if not done(hwrf_expt.satpost): hwrf_expt.satpost.runpart()
            if not done(hwrf_expt.nonsatpost): hwrf_expt.nonsatpost.runpart()
            # Done when every *enabled* task is complete:
            alldone = ( done(hwrf_expt.satpost) or not run_satpost ) \
                and done(hwrf_expt.nonsatpost) \
                and ( not run_copier or done(hwrf_expt.wrfcopier) )
            after = int(time.time())
            took = after - before
            threshold = 5   # seconds; below this, iteration did little work
            sleeptime = 20  # seconds to sleep after a near-idle iteration
            if took < threshold:
                logger.info(
                    'Post loop iteration took only %d seconds, which is '
                    'less than the threshold of %d seconds.  Will sleep '
                    '%d seconds.' % (took, threshold, sleeptime))
                time.sleep(sleeptime)
            else:
                logger.info('Post loop iteration took %d seconds, '
                            'which is above the threshold of %d.  '
                            'Sleeping only one second.' % (took, threshold))
                time.sleep(1)  # avoid thrash loop in case of logic error
            logger.info('Done sleeping.')

    jlogger.info('completed post')
Exemple #8
0
def prelaunch_wind(conf,logger):
    """!Disables vortexinit and GSI if the wind is below some threshold."""
    threshold = conf.getint('config', 'min_wind_for_init', 0)
    wind = conf.syndat.wmax
    if wind >= threshold:
        return  # storm is strong enough; leave the configuration alone
    jlogger.info('Wind %d < %d so disabling GSI and relocation.'%(
            wind,threshold))
    logger.info('Wind %d<%d - run_gsi, run_vortexinit and run_ens_vortexinit overridden to "no"'%(
            wind,threshold))
    for option in ('run_gsi', 'run_vortexinit', 'run_ens_vortexinit'):
        conf.set('config', option, 'no')
def gribber():
    """Run the regribbing task inside the cycle's work directory."""
    conf = hwrf_expt.conf
    jlogger.info(conf.strinterp(
        'config',
        '{stormlabel}: starting regribbing job for {out_prefix}'))
    with NamedDir(hwrf_expt.WORKhwrf, logger=logging.getLogger()) as t:
        # Reset completion state before re-running the gribber:
        hwrf_expt.gribber.uncomplete()
        #hwrf_expt.gribber.unrun()
        hwrf_expt.gribber.run()
    jlogger.info(conf.strinterp(
        'config',
        '{stormlabel}: completed regribbing job for {out_prefix}'))
def main():
    """Run a stage of HWRF initialization chosen by environment variables.

    $INIT_MODEL selects gfs or gdas1; $INIT_FHR selects the FGAT forecast
    hour for gdas1; $INIT_PARTS selects which portion of the init to run
    (parent, 3dvar, bdy, or all).
    """
    ENV = os.environ
    init_model = ENV['INIT_MODEL'].lower()
    init_fhr = int(ENV.get('INIT_FHR', '0'))
    init_parts = ENV['INIT_PARTS'].lower()
    if init_model != 'gfs' and init_model != 'gdas1':
        fail('Aborting: init_model="%s" must be "gfs" or "gdas1"' %
             (init_model, ))
    if init_model == 'gdas1' and init_fhr < 1:
        # BUGFIX: the condition rejects init_fhr < 1, so the message must
        # say ">= 1".  It previously said "> 1", contradicting both the
        # check and the identical message in the relocation job.
        fail(
            'Aborting: when init_model=gdas1, init_fhr must be >= 1 (init_fhr=%d)'
            % (init_fhr, ))
    if init_model == 'gfs': init_fhr = 0

    import hwrf_expt
    hwrf_expt.init_module()
    os.chdir(hwrf_expt.conf.getdir('WORKhwrf'))
    if init_model == 'gfs':
        init = hwrf_expt.gfs_init
    elif not hwrf_expt.conf.getbool('config', 'run_gsi'):
        jlogger.info('GSI is disabled.  This job need not be run.')
        sys.exit(0)
    else:
        init = None
        logger = hwrf_expt.fgat_init.log()
        logger.info('search for fgat hour %d' % (init_fhr, ))
        # Find the FGAT initialization matching the requested forecast hour:
        for fhr, init in hwrf_expt.fgat_init.fhr_and_init():
            if abs(fhr - init_fhr) < 0.01:
                logger.info('fhr %d is init_fhr %d' % (fhr, init_fhr))
                #init.run()
                break
            else:
                logger.info('fhr %d is not init_fhr %d' % (fhr, init_fhr))
        assert (init is not None)

    # Run the requested portion(s) of the chosen initialization:
    if init_parts == 'parent':
        init.run_through_anl()
    elif init_parts == '3dvar':
        init.run_through_anl()
        init.run_init_after_anl()
    elif init_parts == 'bdy':
        init.run_real_bdy()
    elif init_parts == 'all':
        init.run_through_anl()
        init.run_init_after_anl()
        init.run_real_bdy()
    else:
        fail(
            'Aborting: invalid value of INIT_PARTS: "%s" (must be "parent," "3dvar" or "bdy")'
            % (init_parts, ))
Exemple #11
0
def main():
    """Run the bufrprep task if GSI is enabled, updating the GSI status file."""
    import hwrf_expt
    hwrf_expt.init_module()
    conf = hwrf_expt.conf
    logger = conf.log('exhwrf_bufrprep')
    # Clear the status file before deciding, set it again afterwards:
    unset_gsistatus(conf, logger)

    if not hwrf_expt.conf.getbool('config', 'run_gsi'):
        jlogger.info('GSI is disabled.  This job need not be run.')
    else:
        hwrf_expt.bufrprep.run()

    set_gsistatus(conf, logger)
def main():
    """Archive the cycle's wrfout files to HPSS via htar.

    Collects one output file per domain per 6-hour interval (preferring
    the auxhist3 stream, then history, then auxhist2), then runs
    "hsi mkdir -p" and "htar" to write the archive named by the
    [archive] section's wrfout option (must begin with "hpss:").
    """
    hwrf_expt.init_module()
    conf = hwrf_expt.conf
    if not conf.has_option('archive', 'wrfout'):
        jlogger.info(
            'No wrfout option in [archive] section.  Will not make wrfout archive.'
        )
        sys.exit(0)

    logger = conf.log()

    files = list()
    dt = hwrf.numerics.to_timedelta('6:00:00')  # archive interval
    t0 = conf.cycle
    wrf = hwrf_expt.runwrf.wrf()
    with produtil.cd.NamedDir(hwrf_expt.runwrf.location):
        for i in xrange(22):
            for dom in wrf:
                t = t0 + dt * i
                # Prefer auxhist3, then history, then auxhist2 streams:
                out = dom.get_output('auxhist3', t)
                if out is None:
                    out = dom.get_output('history', t)
                if out is None:
                    out = dom.get_output('auxhist2', t)
                if out is None:
                    logger.error('%s: could not determine wrfout for '
                                 'domain %s' %
                                 (t.strftime('%Y%m%d%H'), str(dom)))
                    # BUGFIX: skip this time/domain; previously execution
                    # fell through to out.path() below and crashed with
                    # AttributeError on None.
                    continue
                if not os.path.exists(out.path()):
                    logger.error('%s: does not exist' % (out.path(), ))
                if not produtil.fileop.isnonempty(out.path(), ):
                    logger.error('%s: is empty' % (out.path(), ))
                files.append(out.path())

        thearchive = conf.timestrinterp('archive', '{wrfout}', 0)
        if thearchive[0:5] != 'hpss:':
            logger.error('The wrfout archive path must begin with "hpss:": ' +
                         thearchive)
            sys.exit(1)
        thearchive = thearchive[5:]  # strip the "hpss:" prefix
        adir = os.path.dirname(thearchive)
        mkdir = exe(conf.getexe('hsi'))['-P', 'mkdir', '-p', adir]
        run(mkdir, logger=logger)
        cmd = exe(conf.getexe('htar'))['-cpf', thearchive][files]
        checkrun(cmd, logger=logger)
def tracker(n):
    """Run the vortex tracker for domain n (1, 2 or 3)."""
    conf = hwrf_expt.conf
    jlogger.info(conf.strinterp(
        'config',
        '{stormlabel}: starting domain {dom} tracker job for {out_prefix}',
        dom=n))
    with NamedDir(hwrf_expt.WORKhwrf, logger=logging.getLogger()) as t:
        if n == 1:
            hwrf_expt.trackerd01.run()
        elif n == 2:
            hwrf_expt.trackerd02.run()
        elif n == 3:
            hwrf_expt.tracker.run()
    jlogger.info(conf.strinterp(
        'config', '{stormlabel}: completed domain {dom} tracker job '
        'for {out_prefix}',
        dom=n))
def main():
    """Pull the cycle's input data from the configured input sources.

    Acts only when input_catalog is "hwrfdata"; any other catalog means
    the data is assumed to be pre-staged on disk.  Exits with status 1
    if any required input could not be retrieved.
    """
    import hwrf_expt
    hwrf_expt.init_module(make_ensemble_da=True)

    conf = hwrf_expt.conf
    cycle = hwrf_expt.conf.cycle
    input_catalog = conf.get('config', 'input_catalog')
    input_sources = conf.get('config', 'input_sources')
    logger = conf.log('exhwrf_input')
    WORKhwrf = conf.getdir('WORKhwrf')

    if input_catalog != 'hwrfdata':
        # BUGFIX: the message has a %s placeholder but no argument was
        # supplied, so the literal "%s" was printed instead of the
        # catalog name.
        jlogger.info("Input catalog is %s, not \"hwrfdata\" so data should "
                     "be staged on disk already.  I have nothing to do, so "
                     "I'll just exit.  This is not an error." %
                     (input_catalog, ))
        sys.exit(0)

    # Make sure we're in the cycle's work directory, otherwise we might
    # pull archives and other big things to $HOME.
    produtil.fileop.chdir(WORKhwrf, logger=logger)

    # Figure out how to run htar:
    htar = exe(conf.getexe('htar'))

    # Figure out how to run hsi:
    hsi = exe(conf.getexe('hsi'))

    # Get the list of data to pull:
    data = list(d for d in hwrf_expt.inputiter())

    # Decide where to put the data:
    cat = hwrf.input.DataCatalog(conf, "hwrfdata", cycle)

    # Now pull the data:
    getem = hwrf.input.InputSource(conf,
                                   input_sources,
                                   conf.cycle,
                                   htar=htar,
                                   hsi=hsi,
                                   logger=logger)
    bad = not getem.get(data, cat)
    if bad:
        jlogger.error('Missing data in exhwrf_input.  Workflow may fail.')
        sys.exit(1)
Exemple #15
0
def main():
    """Re-run the GSI post-processor and regribber, delivering to COM."""
    hwrf_expt.init_module()
    logger = hwrf_expt.conf.log('exhwrf_gsi_post')

    if not hwrf_expt.conf.getbool('config', 'run_gsi'):
        jlogger.info('GSI is disabled.  This job need not be run.')
        sys.exit(0)

    produtil.fileop.chdir(hwrf_expt.conf.getdir('WORKhwrf'), logger=logger)

    # Reset both tasks so they will run again from scratch:
    logger.info('Unrun GSI post and gribber')
    for task in (hwrf_expt.gsipost, hwrf_expt.gsigribber):
        task.unrun()

    logger.info('Run GSI post')
    hwrf_expt.gsipost.run()

    logger.info('Run GSI gribber, and deliver to com.')
    hwrf_expt.gsigribber.run(now=True)
Exemple #16
0
def main():
    """Run the GFS or GDAS merge depending on $INIT_MODEL and GSI status."""
    init_model = os.environ.get('INIT_MODEL', 'GDAS1').lower()
    if init_model not in ('gfs', 'gdas1'):
        fail('Aborting: init_model="%s" must be "gfs" or "gdas1"'
             %(init_model,))

    import hwrf_expt
    hwrf_expt.init_module()
    conf = hwrf_expt.conf
    logger = conf.log('exhwrf_merge')
    if init_model == 'gfs':
        jlogger.info('MERGE does not need to be run for INIT_MODEL=GFS')
        hwrf_expt.gfs_merge.run()
        return
    if not hwrf_expt.conf.getbool('config', 'run_gsi'):
        jlogger.info('GSI is disabled via configuration settings.  '
                     'This job need not be run.')
        sys.exit(0)
    # Short-circuits: d03 status is only read when d02's status is false.
    if not (hwrf.gsi.get_gsistatus(conf, 'gsi_d02', logger) or
            hwrf.gsi.get_gsistatus(conf, 'gsi_d03', logger)):
        jlogger.info('GSI status file claims GSI is disabled for both '
                     'domains.  This job need not be run.')
        sys.exit(0)
    hwrf_expt.gdas_merge.run()
def main():
    """Run one HWRF ENSDA ensemble member and deliver its products to COM.

    The member number comes from the $ENSDA_MEMB environment variable.
    After the member runs, each available product is compressed-copied to
    COM; the job exits 1 if a product is not a NetCDF 3 file.
    """
    logger = logging.getLogger('exhwrf_ensda')
    ENV = os.environ
    memb = ENV.get('ENSDA_MEMB', 'NOPE').lower()
    if memb == 'nope':
        fail('Aborting: you must specify ENSDA_MEMB')
    imemb = int(memb, 10)  # explicit base 10: "08" must not be read as octal
    jlogger.info('HWRF ensda member %03d starting' % imemb)

    set_vars(logger)

    import hwrf_expt
    hwrf_expt.init_module(make_ensemble_da=True)
    omemb = hwrf_expt.ensda.member(hwrf_expt.conf.cycle, imemb)
    omemb.run()
    for prod in omemb.products():
        if not prod.location:
            logger.error('No product: %s' % (prod.did, ))
        elif not prod.available:
            logger.error('Product %s not available (location %s)' %
                         (repr(prod.did), repr(prod.location)))
        else:
            # Destination: <com>/<out_prefix>.ensda_NNN.<product basename>
            dest = '%s/%s.ensda_%03d.%s' % (hwrf_expt.conf.getdir('com'),
                                            hwrf_expt.conf.getstr(
                                                'config', 'out_prefix'), imemb,
                                            os.path.basename(prod.location))
            logger.info('%s %s: send to %s' %
                        (str(prod.did), repr(imemb), str(dest)))
            assert (os.path.isabs(dest))
            # compression_copier returns None when the file is not NetCDF 3:
            copier = hwrf_expt.wrfcopier.compression_copier(prod.location)
            if copier is None:
                logger.error('%s %s: not a NetCDF 3 file.' %
                             (str(prod.did), str(prod.location)))
                sys.exit(1)
            produtil.fileop.deliver_file(prod.location,
                                         dest,
                                         logger=logger,
                                         copier=copier)

    jlogger.info('HWRF ensda member %03d has completed' % imemb)
Exemple #18
0
def main():
    """Run vortex relocation for the GFS or for one GDAS1 FGAT hour.

    $INIT_MODEL selects gfs or gdas1; for gdas1, $INIT_FHR selects which
    FGAT forecast hour's initialization to relocate.  Exits 0 without
    running when GSI (and hence GDAS relocation) is disabled.
    """
    ENV = os.environ
    init_model = ENV['INIT_MODEL'].lower()
    init_fhr = int(ENV.get('INIT_FHR', '0'))
    if init_model != 'gfs' and init_model != 'gdas1':
        fail('Aborting: init_model="%s" must be "gfs" or "gdas1"' %
             (init_model, ))
    if init_model == 'gdas1' and init_fhr < 1:
        fail(
            'Aborting: when init_model=gdas1, init_fhr must be >= 1 (init_fhr=%d)'
            % (init_fhr, ))
    if init_model == 'gfs': init_fhr = 0

    import hwrf_expt
    hwrf_expt.init_module()
    if init_model == 'gfs':
        jlogger.info('HWRF relocation for GFS fhr starting')
        init = hwrf_expt.gfs_init.run_relocate()
        jlogger.info('HWRF relocation for GFS fhr completed')
    elif not hwrf_expt.conf.getbool('config', 'run_gsi'):
        jlogger.info('GSI is disabled.  This job need not be run.')
        sys.exit(0)
    else:
        init = None
        logger = hwrf_expt.fgat_init.log()
        logger.info('search for fgat hour %d' % (init_fhr, ))
        # Find the FGAT initialization matching the requested forecast hour
        # and relocate only that one:
        for fhr, init in hwrf_expt.fgat_init.fhr_and_init():
            if abs(fhr - init_fhr) < 0.01:
                logger.info('fhr %d is init_fhr %d' % (fhr, init_fhr))
                jlogger.info('HWRF relocation for GDAS1 fhr %d starting' % fhr)
                init.run_relocate()
                jlogger.info('HWRF relocation for GDAS1 fhr %d completed' %
                             fhr)
                break
            else:
                logger.info('fhr %d is not init_fhr %d' % (fhr, init_fhr))
        assert (init is not None)
def copier():
    """Run the wrfcopier task unless the post job handles it, then regrib."""
    post_runs_copier = hwrf_expt.conf.getbool('config', 'post_runs_wrfcopier',
                                              False)
    if post_runs_copier:
        jlogger.info('Products job will not run wrfcopier, post will do it.')
    else:
        jlogger.info(
            hwrf_expt.conf.strinterp(
                'config',
                '{stormlabel}: starting wrfcopier job for {out_prefix}'))
        with NamedDir(hwrf_expt.WORKhwrf, logger=logging.getLogger()) as t:
            hwrf_expt.wrfcopier.run(check_all=True)
        jlogger.info(
            hwrf_expt.conf.strinterp(
                'config',
                '{stormlabel}: completed wrfcopier job for {out_prefix}'))
    gribber()
Exemple #20
0
             %(init_model,))
    
    import hwrf_expt
    hwrf_expt.init_module()
    conf=hwrf_expt.conf
    logger=conf.log('exhwrf_merge')
    if init_model=='gfs':
        jlogger.info('MERGE does not need to be run for INIT_MODEL=GFS')
        hwrf_expt.gfs_merge.run()
    elif not hwrf_expt.conf.getbool('config','run_gsi'):
        jlogger.info('GSI is disabled via configuration settings.  '
                     'This job need not be run.')
        sys.exit(0)
    elif not hwrf.gsi.get_gsistatus(conf,'gsi_d02',logger) and \
         not hwrf.gsi.get_gsistatus(conf,'gsi_d03',logger):
        jlogger.info('GSI status file claims GSI is disabled for both '
                     'domains.  This job need not be run.')
        sys.exit(0)
    else:
        hwrf_expt.gdas_merge.run()

# Script entry point: run the merge job, reporting start/completion and
# any fatal error to the jlogfile; exit 2 on failure.
if __name__=='__main__':
    try:
        produtil.setup.setup()
        jlogger.info('exhwrf_merge is starting')
        main()
        jlogger.info('exhwrf_merge has completed')
    except Exception as e:
        jlogger.critical('HWRF merge is aborting: '+str(e),exc_info=True)
        sys.exit(2)
    logger = conf.log()

    files = list()
    WORKhafs = conf.getdir('WORKhafs', '/can/not/find/WORKhafs/dir')
    forecastdir = os.path.join(WORKhafs, 'forecast')
    with produtil.cd.NamedDir(forecastdir):
        files.append(glob.glob('*.nc'))
        thearchive = conf.timestrinterp('archive', '{fv3out}', 0)
        if thearchive[0:5] != 'hpss:':
            logger.error('The fv3out archive path must begin with "hpss:": ' +
                         thearchive)
            sys.exit(1)
        thearchive = thearchive[5:]
        adir = os.path.dirname(thearchive)
        mkdir = batchexe(conf.getexe('hsi'))['-P', 'mkdir', '-p', adir]
        run(mkdir, logger=logger)
        cmd = batchexe(conf.getexe('htar'))['-vcpf', thearchive][files]
        checkrun(cmd, logger=logger)


# Script entry point: run the HAFS fv3out archiving job, reporting
# start/completion and any fatal error to the jlogfile; exit 2 on failure.
if __name__ == '__main__':
    try:
        produtil.setup.setup()
        jlogger.info("HAFS fv3out_archive job starting")
        main()
        jlogger.info("HAFS fv3out_archive job completed")
    except Exception as e:
        jlogger.critical('HAFS rundir archive is aborting: ' + str(e),
                         exc_info=True)
        sys.exit(2)
    hsi = exe(conf.getexe('hsi'))

    # Get the list of data to pull:
    data = list(d for d in hwrf_expt.inputiter())

    # Decide where to put the data:
    cat = hwrf.input.DataCatalog(conf, "hwrfdata", cycle)

    # Now pull the data:
    getem = hwrf.input.InputSource(conf,
                                   input_sources,
                                   conf.cycle,
                                   htar=htar,
                                   hsi=hsi,
                                   logger=logger)
    bad = not getem.get(data, cat)
    if bad:
        jlogger.error('Missing data in exhwrf_input.  Workflow may fail.')
        sys.exit(1)


# Script entry point: run the HWRF input job.  Thread logging is enabled
# because the input-fetching machinery may log from worker threads.
if __name__ == '__main__':
    try:
        produtil.setup.setup(thread_logger=True, eloglevel=logging.INFO)
        jlogger.info("HWRF input job starting")
        main()
        jlogger.info("HWRF input job completed")
    except Exception as e:
        jlogger.critical('HWRF input is aborting: ' + str(e), exc_info=True)
        sys.exit(2)
Exemple #23
0
def main():
    """Deliver all HWRF cycle output to COM (and noscrub, if configured).

    Runs the wrfcopier, delivers vitals, ocean, log, and GSI files to
    COM, sends DBN/email alerts, copies selected products to the noscrub
    directories, and finally writes the cycle "done file" when the ENSDA
    output job will not do so.  Exits 1 if any mandatory delivery fails.
    """
    import hwrf_expt
    hwrf_expt.init_module(make_ensemble_da=True)
    # Make sure DBN alerts and other such things are triggered:
    hwrf_alerts.add_nhc_alerts()
    hwrf_alerts.add_regrib_alerts()
    hwrf_alerts.add_wave_alerts()

    # Rebind the module-level copier name to the compression copier so
    # later deliveries use it:
    global copier
    copier = hwrf_expt.wrfcopier.compression_copier

    if 'NO' == os.environ.get('PARAFLAG', 'YES'):
        jlogger.info(
            'Calling email_afos_to_sdm from output job to email the track.')
        afos = hwrf_expt.nhcp.product('afos')
        hwrf_alerts.email_afos_to_sdm(afos)
        jlogger.info(
            'Done with email_afos_to_sdm.  Will now celebrate by delivering many things to COM.'
        )

    # Configuration flags controlling which deliveries happen:
    conf = hwrf_expt.conf
    relocation = conf.getbool('config', 'run_relocation', True)
    coupled = conf.getbool('config', 'run_ocean', True)
    GSI = conf.getbool('config', 'run_gsi')
    run_ensemble_da = conf.getbool('config', 'run_ensemble_da', False)
    extra_trackers = conf.getbool('config', 'extra_trackers', False)
    fcstlen = conf.getint('config', 'forecast_length', 126)

    logger = conf.log('output')

    # The ocean status file overrides run_ocean (e.g. unsupported basin):
    if coupled and not hwrf.mpipomtc.get_ocstatus(conf, logger):
        coupled = False

    hwrf_expt.wrfcopier.run()

    D = Deliverer(logger, conf)
    D['wrfdir'] = hwrf_expt.runwrf.workdir
    D.deliver_file('{WORKhwrf}/tmpvit', '{out_prefix}.storm_vit')

    if GSI:
        D['gsi_d02'] = hwrf_expt.gsi_d02.outdir
        if hwrf_expt.gsid03_flag:
            D['gsi_d03'] = hwrf_expt.gsi_d03.outdir

    logger.info('WRF run directory is %s' % (repr(D['wrfdir']), ))

    D.deliver_file('{WORKhwrf}/jlogfile', optional=True)

    # The three WRF domains:
    d01 = hwrf_expt.moad
    d02 = hwrf_expt.storm1outer
    d03 = hwrf_expt.storm1inner

    if coupled:
        # Deliver POM ocean initialization and daily restart files:
        D.deliver_file('{wrfdir}/MDstatus', optional=True)
        for ocrest in ('el_initial.nc', 'grid.nc', 'ts_clim.nc',
                       'ts_initial.nc', 'uv_initial.nc'):
            D.deliver_file('{wrfdir}/{vit[stormname]}.{ocrest}',
                           '{out_prefix}.pom.{ocrest}',
                           ocrest=ocrest)

        for iday in xrange(int(math.floor(fcstlen / 24.0 + 0.01))):
            ocrest = "%04d.nc" % iday
            D.deliver_file('{wrfdir}/{vit[stormname]}.{ocrest}',
                           '{out_prefix}.pom.{ocrest}',
                           ocrest=ocrest)

    # Deliver every log/out/err file found in the work directory:
    logcount = 0
    for ext in ('log', 'out', 'err'):
        globme = conf.getdir('WORKhwrf') + '/*.' + ext
        logger.info('Globbing for %s log files' % (globme, ))
        for log in glob.glob(globme):
            logcount += 1
            D.deliver_file(log)
    logger.info('Found %d log file(s)' % (logcount, ))

    # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
    # Deliver GSI stuff next.
    # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
    # gsiop: if either GSI domain did not complete, GSI deliveries
    # become optional instead of mandatory.
    gsi_flag = conf.getbool('config', 'run_gsi')
    gsiop = True
    if gsi_flag:
        gsiop = ((not hwrf_expt.gsi_d02.completed)
                 or (not hwrf_expt.gsi_d03.completed))
        if gsiop:
            logger.warning('GSI failed, so all GSI products are optional.')
        else:
            logger.info('GSI ran, so its products are mandatory.')
    if GSI:
        # Copy the original wrfinput file before DA:
        org_d01 = hwrf_expt.gfs_init.realinit.wrfinput_at_time(
            hwrf_expt.cycle, d01)
        D.deliver_file(org_d01, '{out_prefix}.wrforg_d01', optional=gsiop)

    if GSI:
        # Get the FGAT initialization at the analysis time:
        ceninit = hwrf_expt.fgat_init.init_at_time(hwrf_expt.conf.cycle)

        # Copy the original wrfanl files before relocation:
        org_d02 = ceninit.runwrfanl.wrfanl_at_time(hwrf_expt.conf.cycle, d02)
        org_d03 = ceninit.runwrfanl.wrfanl_at_time(hwrf_expt.conf.cycle, d03)
        D.deliver_file(org_d02, '{out_prefix}.wrforg_d02', optional=gsiop)
        D.deliver_file(org_d03, '{out_prefix}.wrforg_d03', optional=gsiop)

        if relocation:
            # Copy the wrfanl files after relocation, but before GSI:
            ges_d02 = ceninit.rstage3.wrfanl_at_time(hwrf_expt.conf.cycle, d02)
            ges_d03 = ceninit.rstage3.wrfanl_at_time(hwrf_expt.conf.cycle, d03)
            D.deliver_file(ges_d02, '{out_prefix}.wrfges_d02', optional=gsiop)
            D.deliver_file(ges_d03, '{out_prefix}.wrfges_d03', optional=gsiop)

    # for domain in hwrf_expt.gfs_init.runwrfanl.sim:
    #     if not domain.is_moad():
    #         org_prod=hwrf_expt.gfs_init.runwrfanl.wrfanl_at_time(
    #             hwrf_expt.cycle,domain)
    #         D.deliver_file(org_prod,'{out_prefix}.wrfanl_d{gid:02d}_org',
    #                      gid=int(domain.get_grid_id()))

    if GSI:
        # Satellite bias correction files for the next cycle:
        D.deliver_file('{gsi_d02}/satbias_out',
                       '{out_prefix}.gsi_cvs2.biascr',
                       optional=gsiop)
        if hwrf_expt.gsid03_flag:
            D.deliver_file('{gsi_d03}/satbias_out',
                           '{out_prefix}.gsi_cvs3.biascr',
                           optional=gsiop)

    # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
    # Lastly, deliver the diag files
    # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
    jlogger.info('Delivering wrfdiag files to com.')
    hwrf_expt.nhcp.deliver_wrfdiag()

    if D.failures > 0:
        jlogger.critical(
            'HWRF: unable to deliver %d non-optional products to com.' %
            int(D.failures))
        sys.exit(1)

    # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
    # Deliver things to noscrub for non-NCO runs  # # # # # # # # # # #
    # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
    # NOTE(review): this if/else only logs; the deliveries below run
    # either way.  Presumably PARAFLAG distinguishes NCO from non-NCO
    # runs -- confirm whether the deliveries should be gated on it.
    if conf.getbool('config', 'PARAFLAG'):
        logger.info('You are not NCO, so I will deliver files to noscrub.')
    else:
        logger.info('You are NCO so I will skip NOSCRUB deliveries.')

    D.reset()

    # Deliver from COM to a noscrub directory:
    def fromcom(workpath, compath, optional=False):
        D.deliver_file(workpath, compath, from_com=True, optional=optional)

    # True if the named [dir] option exists; creates the directory if so:
    def havedir(sdir):
        there = conf.get('dir', sdir, 'NOPE')
        if there == 'NOPE':
            return False
        produtil.fileop.makedirs(there)
        return True

    if havedir('outatcf'):
        fromcom('{outatcf}', '{out_prefix}.trak.hwrf.atcfunix')
    if havedir('outdiag'):
        fromcom('{outdiag}', '{out_prefix}.trak.hwrf.3hourly*')
        fromcom('{outdiag}', '{out_prefix}*resolution', True)
        fromcom('{outdiag}', '{out_prefix}*htcf*stats', True)
        fromcom('{outdiag}', '{out_prefix}*htcf', True)
        fromcom('{outdiag}', 'a*.dat')
        fromcom('{outdiag}', '{out_prefix}.stats.tpc', optional=True)
        if extra_trackers:
            fromcom('{outdiag}', '{com}/{out_prefix}.trak.hwrfd01.atcfunix')
            fromcom('{outdiag}', '{com}/{out_prefix}.trak.hwrfd02.atcfunix')
    if havedir('outships'):
        fromcom('{outships}', 'figures/*.txt', optional=True)
    if havedir('outstatus'):
        fromcom('{outstatus}', '{WORKhwrf}/submit.out', optional=True)
        # Collect TIMING lines from all job output files into one file:
        timings = conf.strinterp('config', '{outstatus}/{out_prefix}.timings')
        inout = conf.strinterp('config', '{WORKhwrf}/hwrf_*.out')
        with open(timings, 'wt') as outf:
            for inoutfile in glob.glob(inout):
                if not os.path.exists(inoutfile):
                    logger.warning('%s: file does not exist; skipping' %
                                   (inoutfile, ))
                with open(inoutfile, 'rt') as inf:
                    for line in inf:
                        if line.find('TIMING') >= 0:
                            print >> outf, line.rstrip()
    if havedir('outatcfcorrected'):
        inatcf = conf.strinterp('config',
                                '{com}/{out_prefix}.trak.hwrf.atcfunix')
        outatcf = conf.strinterp(
            'config', '{outatcfcorrected}/{out_prefix}.trak.hwrf.atcfunix')
        hwrf.tracker.jtwc_rewrite(inatcf, outatcf, logger)

    ####################################################################

    # Create the "done file" if ensda is entirely disabled.  This is
    # used by the workflow layer to know when the cycle is entirely
    # complete, and can be deleted.

    # NOTE FOR FUTURE DEVELOPMENT: When the graphics are added to the
    # workflow, we will need to move the creation of this "done file"
    # to a later step, after the graphics.  The logical candidate
    # would be a new job whose purpose is to check the cycle's entire
    # workflow to make sure it is finished.
    make_done = True
    if run_ensemble_da:
        flag_file = conf.strinterp('tdrcheck', '{tdr_flag_file}')
        try:
            ensda_flag = hwrf.ensda.read_ensda_flag_file(flag_file)
        except (EnvironmentError) as e:
            logger.error('%s: unable to get ensda_flag; assume False: %s' %
                         (flag_file, str(e)),
                         exc_info=True)
            ensda_flag = False

        if ensda_flag:
            jlogger.info(
                'Not creating donefile: ensda_output will do it instead.')
            make_done = not ensda_flag
        else:
            jlogger.info('ensda disabled: make donefile now')

    if make_done:
        donefile = os.path.join(
            conf.strinterp('config', '{com}/{stormlabel}.done'))
        with open(donefile, 'wt') as f:
            f.write('Cycle is complete.')
Exemple #24
0
            logger.error('%s: unable to get ensda_flag; assume False: %s' %
                         (flag_file, str(e)),
                         exc_info=True)
            ensda_flag = False

        if ensda_flag:
            jlogger.info(
                'Not creating donefile: ensda_output will do it instead.')
            make_done = not ensda_flag
        else:
            jlogger.info('ensda disabled: make donefile now')

    if make_done:
        donefile = os.path.join(
            conf.strinterp('config', '{com}/{stormlabel}.done'))
        with open(donefile, 'wt') as f:
            f.write('Cycle is complete.')


########################################################################

# Script entry point: run the output delivery job, reporting
# start/completion and any fatal error to the jlogfile; exit 2 on failure.
if __name__ == '__main__':
    try:
        produtil.setup.setup()
        jlogger.info('hwrf_output is starting')
        main()
        jlogger.info('hwrf_output has completed')
    except Exception as e:
        jlogger.critical('hwrf_output is aborting: ' + str(e), exc_info=True)
        sys.exit(2)
"""This script determines whether the ENSDA needs to be run for this
cycle."""

import sys, os
import produtil.setup, produtil.log
from produtil.log import jlogger

def main():
    """Decide whether the HWRF DA ensemble must run for this cycle.

    Initializes the experiment with ensemble DA objects enabled, then
    either runs the ensda_pre task or, when the configuration disables
    ENSDA, writes a False flag file and returns without running it.
    """
    import hwrf_expt
    hwrf_expt.init_module(make_ensemble_da=True)

    if hwrf_expt.conf.getbool('config', 'run_ensemble_da'):
        hwrf_expt.ensda_pre.run()
    else:
        jlogger.info('ENSDA is disabled for this configuration.  '
                     'This job need not be run.')
        # Record the decision so downstream jobs know ENSDA was skipped.
        hwrf_expt.ensda_pre.write_flag_file(False)

if __name__ == '__main__':
    # Entry point: set up produtil, then run main(), reporting the
    # outcome through the jlogfile logger.
    try:
        produtil.setup.setup()
        jlogger.info('ensda_pre is starting')
        main()
        jlogger.info('ensda_pre is completed')
    except Exception as exc:
        # Unhandled failures abort the job with a non-zero status.
        jlogger.critical('ensda_pre is aborting: ' + str(exc), exc_info=True)
        sys.exit(2)

                                 (t.strftime('%Y%m%d%H'), str(dom)))
                if not os.path.exists(out.path()):
                    logger.error('%s: does not exist' % (out.path(), ))
                if not produtil.fileop.isnonempty(out.path(), ):
                    logger.error('%s: is empty' % (out.path(), ))
                files.append(out.path())

        thearchive = conf.timestrinterp('archive', '{wrfout}', 0)
        if thearchive[0:5] != 'hpss:':
            logger.error('The wrfout archive path must begin with "hpss:": ' +
                         thearchive)
            sys.exit(1)
        thearchive = thearchive[5:]
        adir = os.path.dirname(thearchive)
        mkdir = exe(conf.getexe('hsi'))['-P', 'mkdir', '-p', adir]
        run(mkdir, logger=logger)
        cmd = exe(conf.getexe('htar'))['-cpf', thearchive][files]
        checkrun(cmd, logger=logger)


if __name__ == '__main__':
    # Entry point for the rundir archiving job.
    try:
        produtil.setup.setup()
        jlogger.info("HWRF rundir archive job starting")
        main()
        jlogger.info("HWRF rundir archive job completed")
    except Exception as exc:
        # Report the failure with a traceback and exit non-zero so the
        # workflow manager can detect it.
        jlogger.critical('HWRF rundir archive is aborting: ' + str(exc),
                         exc_info=True)
        sys.exit(2)
# ----- Exemple #27 (score: 0) -----
def main():
    """Regenerate the GSI post-processed products for this cycle.

    Exits with status 0 immediately when GSI is disabled in the
    configuration.  Otherwise changes to the cycle work area, clears
    any prior gsipost/gsigribber products, reruns the GSI post, and
    runs the gribber to deliver its output to com.
    """
    hwrf_expt.init_module()
    conf = hwrf_expt.conf
    logger = conf.log('exhwrf_gsi_post')

    if not conf.getbool('config', 'run_gsi'):
        jlogger.info('GSI is disabled.  This job need not be run.')
        sys.exit(0)

    # Work inside this cycle's WORKhwrf scrub area.
    produtil.fileop.chdir(conf.getdir('WORKhwrf'), logger=logger)

    logger.info('Unrun GSI post and gribber')
    hwrf_expt.gsipost.unrun()
    hwrf_expt.gsigribber.unrun()

    logger.info('Run GSI post')
    hwrf_expt.gsipost.run()

    logger.info('Run GSI gribber, and deliver to com.')
    hwrf_expt.gsigribber.run(now=True)


if __name__ == '__main__':
    # Entry point for the GSI post job.
    try:
        produtil.setup.setup()
        jlogger.info("HWRF GSI post job starting")
        main()
        jlogger.info("HWRF GSI post job completed")
    except Exception as exc:
        # Log the traceback and exit non-zero on any unhandled failure.
        jlogger.critical('HWRF GSI post is aborting: ' + str(exc), exc_info=True)
        sys.exit(2)
# ----- Exemple #28 (score: 0) -----
                    ))
                    bad = True
                elif os.path.getsize(dest) < 1:
                    logger.warning('ensda %03d: %s: is empty' % (imemb, dest))
                    bad = True
                else:
                    logger.info('ensda %03d: %s exists and is non-empty.' %
                                (imemb, dest))

    if bad:
        logger.critical(
            'HWRF data assimilation ensemble products are missing.')
        sys.exit(1)

    jlogger.info('Creating donefile.')
    donefile = os.path.join(conf.strinterp('config',
                                           '{com}/{stormlabel}.done'))
    with open(donefile, 'wt') as f:
        f.write('Cycle is complete.')


if __name__ == '__main__':
    # Entry point: run main() with produtil initialized, logging
    # start/completion/failure to the jlogfile.
    try:
        produtil.setup.setup()
        jlogger.info('ensda_output is starting')
        main()
        jlogger.info('ensda_output is completed')
    except Exception as exc:
        # Abort with a non-zero status so the workflow sees the failure.
        jlogger.critical('ensda_output is aborting: ' + str(exc), exc_info=True)
        sys.exit(2)