コード例 #1
0
	def download_gfs(start_time, forecast_hour):
		"""Download one GFS GRIB2 file for the given cycle and forecast hour.

		Skips the file when another process is already downloading it or when a
		complete local copy exists; partial downloads are removed and resumed
		via ``curl -C -``. Relies on module-level ``prefix``, ``resolution``,
		``root_url`` and ``output_root``.
		"""
		dir_name = f'{prefix}.{start_time.format("YYYYMMDD")}/{start_time.format("HH")}'
		file_name = '{}.t{:02d}z.pgrb2.{}.f{:03d}'.format(prefix, start_time.hour, resolution, forecast_hour)
		url = f'{root_url}/{dir_name}/{file_name}'
		if not os.path.isdir(f'{output_root}/{dir_name}'):
			os.makedirs(f'{output_root}/{dir_name}')
			cli.notice(f'Create directory {output_root}/{dir_name}.')
		cli.notice(f'Downloading {url}.')
		local_file_path = f'{output_root}/{dir_name}/{file_name}'
		if is_downloading(local_file_path):
			cli.warning(f'Skip downloading {local_file_path}.')
			return
		if os.path.isfile(local_file_path):
			if check_file_size(url, local_file_path):
				cli.notice(f'File {local_file_path} exists.')
				return
			else:
				# File is not downloaded completely.
				os.remove(local_file_path)
		try:
			# -C - resumes an interrupted transfer from where it left off.
			subprocess.call(['curl', '-C', '-', '-o', local_file_path, url])
		except Exception as e:
			# BUG FIX: the message was a plain string, so '{e}' was printed
			# literally; use an f-string to interpolate the exception.
			cli.error(f'Encounter exception {e}!')
		if not check_file_size(url, local_file_path):
			os.remove(local_file_path)
			cli.error(f'Failed to download {file_name}!')
コード例 #2
0
def run_wps_geogrid(work_root, wps_root, config, args):
    """Run WPS geogrid.exe under <work_root>/wps.

    Creates the work directory and the GEOGRID.TBL link when missing, then
    submits geogrid.exe unless one geo_em file per domain already exists.
    """
    work_dir = os.path.abspath(work_root) + '/wps'
    if not os.path.isdir(work_dir):
        os.mkdir(work_dir)
    os.chdir(work_dir)

    cli.notice(f'Run geogrid.exe at {work_dir} ...')
    # geogrid.exe reads GEOGRID.TBL from the working directory.
    if not os.path.isfile('GEOGRID.TBL'):
        run(f'ln -sf {wps_root}/geogrid/GEOGRID.TBL.ARW {work_dir}/GEOGRID.TBL')
    expected_files = [
        f'geo_em.d{dom + 1:02d}.nc'
        for dom in range(config['domains']['max_dom'])
    ]
    if check_files(expected_files):
        cli.notice('File geo_em.*.nc already exist.')
    else:
        run('rm -f geo_em.d*.nc')
        submit_job(f'{wps_root}/geogrid/src/geogrid.exe',
                   args.np,
                   config,
                   args,
                   logfile='geogrid.log.0000',
                   wait=True)
        if not check_files(expected_files):
            cli.error(f'Failed! Check output {os.path.abspath(work_dir)}/geogrid.out.0000')
        cli.notice('Succeeded.')
    run(f'ls -l {work_dir}/geo_em.*.nc')
コード例 #3
0
def download_gfs(datetime, forecast):
	"""Fetch the newest remote GFS file matching the cycle/forecast pattern
	into <root_dir>/<YYYYMMDDHH> over FTP.

	Uses module-level ``file_path_pattern``, ``ftp``, ``connect`` and ``args``.
	Errors out via cli.error when no remote file matches the pattern.
	"""
	remote_file_path_pattern = file_path_pattern.format(datetime.format('YYYYMMDD'), datetime.hour, forecast)
	# Removed unused local `remote_dir` (os.path.dirname of the pattern was
	# computed but never read).
	local_dir = f'{args.root_dir}/{datetime.format("YYYYMMDDHH")}'
	try:
		# Pick the lexicographically last match, i.e. the most recent file.
		remote_file_path = sorted(ftp_list(ftp, remote_file_path_pattern, connect))[-1]
	except IndexError:
		cli.error(f'Failed to get remote_file_path with pattern {remote_file_path_pattern}!')
	ftp_get(ftp, remote_file_path, local_dir, connect, thread_size=40, force=True)
コード例 #4
0
def run_wrfplus_ad(work_root, wrfplus_root, config, args):
	"""Run the WRFPLUS adjoint model in <work_root>/wrfplus.

	Links the first-guess input and boundary files from the WRF work
	directory, stages the double-precision lookup tables, then submits
	wrfplus.exe (wrf.exe for versions < 4.0), retrying up to 10 times.
	Errors out via cli.error when prerequisites are missing or all retries
	fail.
	"""
	start_time = config['custom']['start_time']
	# end_time is read but not used in this function.
	end_time = config['custom']['end_time']
	datetime_fmt = 'YYYY-MM-DD_HH:mm:ss'
	start_time_str = start_time.format(datetime_fmt)
	max_dom = config['domains']['max_dom']

	wrf_work_dir = os.path.abspath(work_root) + '/wrf'
	if not os.path.isdir(wrf_work_dir):
		cli.error(f'WRF work directory {wrf_work_dir} does not exist!')

	wrfplus_work_dir = os.path.abspath(work_root) + '/wrfplus'
	if not os.path.isdir(wrfplus_work_dir):
		cli.error(f'WRFPLUS has not been configured! Run config_wrfplus.py first.')
	os.chdir(wrfplus_work_dir)

	# Prefer a real wrfinput file; otherwise fall back to a wrfout valid at
	# start_time as the first guess.
	# NOTE(review): the first branch tests wrfinput_d01_{start_time_str} but
	# links plain wrfinput_d01 — confirm both names exist side by side.
	if os.path.isfile(f'{wrf_work_dir}/wrfinput_d01_{start_time_str}'):
		run(f'ln -sf {wrf_work_dir}/wrfinput_d01 .')
	elif os.path.isfile(f'{wrf_work_dir}/wrfout_d01_{start_time_str}'):
		run(f'ln -sf {wrf_work_dir}/wrfout_d01_{start_time_str} wrfinput_d01')
	run(f'ln -sf {wrf_work_dir}/wrfbdy_d01 .')
	# The adjoint run needs the final sensitivity as forcing.
	if not os.path.isfile('final_sens_d01'):
		cli.error('There is no final_sens_d01 file!')

	version = wrf_version(wrfplus_root)

	cli.stage(f'Run WRFPLUS at {wrfplus_work_dir} ...')
	expected_files = ['wrfout_d{:02d}_{}'.format(i + 1, start_time_str) for i in range(max_dom)]
	expected_files.append(f'init_sens_d01_{start_time_str}')
	if not check_files(expected_files) or args.force:
		run('rm -f wrfout_*')
		# WRFPLUS requires the double-precision versions of the lookup data.
		run(f'ln -sf {wrfplus_root}/run/LANDUSE.TBL .')
		run(f'ln -sf {wrfplus_root}/run/VEGPARM.TBL .')
		run(f'ln -sf {wrfplus_root}/run/SOILPARM.TBL .')
		run(f'ln -sf {wrfplus_root}/run/GENPARM.TBL .')
		run(f'ln -sf {wrfplus_root}/run/RRTM_DATA_DBL RRTM_DATA')
		run(f'ln -sf {wrfplus_root}/run/ETAMPNEW_DATA_DBL ETAMPNEW_DATA')
		# The executable was renamed to wrfplus.exe in version 4.0.
		if version >= Version('4.0'):
			cmd = f'{wrfplus_root}/run/wrfplus.exe'
		else:
			cmd = f'{wrfplus_root}/run/wrf.exe'
		retries = 0
		while True:
			submit_job(cmd, args.np, config, args, wait=True)
			# Rename the gradient output to the expected init_sens name.
			if os.path.isfile(f'gradient_wrfplus_d01_{start_time_str}'):
				run(f'mv gradient_wrfplus_d01_{start_time_str} init_sens_d01_{start_time_str}')
			if not check_files(expected_files):
				# Give up after 10 retries.
				if retries == 10:
					cli.error(f'Failed! Check output {os.path.abspath(wrfplus_work_dir)}/rsl.error.0000.')
				retries = retries + 1
				cli.warning('Failed to run wrfplus, retry it!')
			else:
				break
		cli.notice('Succeeded.')
	else:
		cli.notice('File wrfout_* already exist.')
	run(f'ls -l {wrfplus_work_dir}/wrfout_*')
コード例 #5
0
 def download_gdas(time):
     """Download the GDAS prepbufr observation file for the given cycle.

     Verifies the remote cycle directory exists, then downloads via curl with
     resume support; skips complete local copies and in-progress downloads.
     Relies on module-level ``root_url`` and ``output_root``.
     """
     dir_name = f'gdas.{time.format("YYYYMMDD")}/{time.format("HH")}'
     # HEAD request to confirm the remote cycle directory before downloading.
     res = requests.head(f'{root_url}/{dir_name}/')
     if res.status_code != 200 and res.status_code != 302:
         cli.error(f'Remote GDAS data at {time} do not exist!')
     file_name = 'gdas.t{:02d}z.prepbufr.nr'.format(time.hour)
     url = f'{root_url}/{dir_name}/{file_name}'
     if not os.path.isdir(f'{output_root}/{dir_name}'):
         os.makedirs(f'{output_root}/{dir_name}')
         cli.notice(f'Create directory {output_root}/{dir_name}.')
     cli.notice(f'Downloading {url}.')
     local_file_path = f'{output_root}/{dir_name}/{file_name}'
     # Another process may already be fetching this file.
     if is_downloading(local_file_path):
         cli.warning(f'Skip downloading {local_file_path}.')
         return
     if os.path.isfile(local_file_path):
         if check_file_size(url, local_file_path):
             cli.notice(f'File {local_file_path} exists.')
             return
         else:
             # File is not downloaded completely.
             os.remove(local_file_path)
     try:
         # -C - resumes an interrupted transfer from where it left off.
         subprocess.call(['curl', '-C', '-', '-o', local_file_path, url])
     except Exception as e:
         cli.error(f'Encounter exception {e}!')
     if not check_file_size(url, local_file_path):
         os.remove(local_file_path)
         cli.error(f'Failed to download {file_name}!')
コード例 #6
0
def run_upp(work_root, upp_root, config, args):
	"""Post-process WRF output with UPP.

	Stages the parm files and the run_unipost script under <work_root>/upp,
	patches the script's paths and times for this case, then executes it.
	"""
	start_time = config['custom']['start_time']
	end_time = config['custom']['end_time']
	max_dom = config['domains']['max_dom']

	start_time_str = start_time.format('YYYY-MM-DD_HH:mm:ss')
	end_time_str = end_time.format('YYYY-MM-DD_HH:mm:ss')

	wrf_work_dir = work_root + '/wrf'
	if not os.path.isdir(wrf_work_dir):
		cli.error('WRF is not run successfully!')

	upp_work_dir = work_root + '/upp'
	if not os.path.isdir(upp_work_dir):
		os.makedirs(upp_work_dir)
	os.chdir(upp_work_dir)

	for sub_dir in ('parm', 'postprd'):
		if not os.path.isdir(f'{upp_work_dir}/{sub_dir}'):
			run(f'mkdir {upp_work_dir}/{sub_dir}')

	for parm_file in ('postxconfig-NT-WRF.txt', 'postcntrl.xml', 'post_avblflds.xml', 'wrf_cntrl.parm'):
		run(f'cp {upp_root}/parm/{parm_file} {upp_work_dir}/parm')
	run(f'cp {upp_root}/scripts/run_unipost {upp_work_dir}/postprd')

	# Rewrite run_unipost in place so it points at this case's paths and times.
	edit_file('./postprd/run_unipost', [
		['/bin/ksh', '/bin/bash'],
		['TOP_DIR=.*', f'TOP_DIR={upp_root}'],
		['DOMAINPATH=.*', f'DOMAINPATH={upp_work_dir}'],
		['UNIPOST_HOME=.*', f'UNIPOST_HOME={upp_root}'],
		['modelDataPath=.*', f'modelDataPath={wrf_work_dir}'],
		['startdate=.*', f'startdate={start_time.format("YYYYMMDDHH")}'],
		['lastfhr=.*', f'lastfhr={(end_time - start_time).hours}'],
		['incrementhr=.*', 'incrementhr=01'],
		['domain_list=.*', 'domain_list="d02"']
	])

	run('./postprd/run_unipost')

	cli.notice('Succeeded.')
コード例 #7
0
def config_wps(work_root, wps_root, geog_root, config, args):
    """Generate namelist.wps in <work_root>/wps from the case config and plot
    the domains.

    Requires domains->max_dom in config. When custom->start_time is present,
    custom->end_time must also be present. Errors out via cli.error on
    missing keys or a WPS version older than 3.9.1.
    """
    if has_key(config, ('custom', 'start_time')):
        start_time = config['custom']['start_time']
        start_time_str = start_time.format('YYYY-MM-DD_HH:mm:ss')
        if not has_key(config, ('custom', 'end_time')):
            cli.error('custom->end_time does not exist in config file!')
        end_time = config['custom']['end_time']
        end_time_str = end_time.format('YYYY-MM-DD_HH:mm:ss')

    if not has_key(config, ('domains', 'max_dom')):
        cli.error('domains->max_dom does not exist in config file!')
    max_dom = config['domains']['max_dom']

    wps_work_dir = work_root + '/wps'
    if not os.path.isdir(wps_work_dir): os.makedirs(wps_work_dir)
    os.chdir(wps_work_dir)

    # Older WPS releases mishandle current GFS GRIB2 data.
    version = wrf_version(wps_root)
    if version < Version('3.9.1'):
        cli.error(
            f'WPS {version} may not handle GFS data correctly! Please use WPS >= 3.9.1.'
        )

    cli.notice('Edit namelist.wps for WPS.')
    copy(f'{wps_root}/namelist.wps', 'namelist.wps')
    namelist_wps = f90nml.read('namelist.wps')
    namelist_wps['share']['max_dom'] = max_dom
    if has_key(config, ('custom', 'start_time')):
        namelist_wps['share']['start_date'] = [
            start_time_str for i in range(max_dom)
        ]
    # NOTE(review): start_time_str is referenced below even when only
    # custom->end_time is present (start_time missing); that path would raise
    # NameError — confirm configs always define both times or neither.
    if has_key(config, ('custom', 'end_time')):
        namelist_wps['share']['end_date'] = [
            end_time_str if i == 0 else start_time_str for i in range(max_dom)
        ]
    if has_key(config, ('custom', 'background')) and has_key(
            config, ('custom', 'background', 'interval_seconds')):
        namelist_wps['share']['interval_seconds'] = config['custom'][
            'background']['interval_seconds']
    namelist_wps['geogrid']['geog_data_path'] = geog_root
    # Overlay any geogrid/metgrid settings from the case config.
    for key, value in config['geogrid'].items():
        namelist_wps['geogrid'][key] = value
    namelist_wps['geogrid']['opt_geogrid_tbl_path'] = wps_work_dir
    namelist_wps['metgrid']['opt_metgrid_tbl_path'] = wps_work_dir
    if 'metgrid' in config:
        for key, value in config['metgrid'].items():
            namelist_wps['metgrid'][key] = value
    namelist_wps.write('./namelist.wps', force=True)
    # Plot the configured domains for visual inspection.
    run(f'ncl -Q {script_root}/../plots/plot_domains.ncl')
    cli.notice(f'Check {wps_work_dir}/wps_show_dom.pdf for domains.')

    cli.notice('Succeeded.')
コード例 #8
0
def run_met(work_root, met_root, config, args):
	"""Prepare MET verification inputs in <work_root>/met.

	Converts a little_r observation file to netCDF with ascii2nc (when
	args.littler_root is set) and stages the default PointStatConfig.
	Errors out via cli.error when prerequisites are missing.
	"""
	start_time = config['custom']['start_time']
	end_time = config['custom']['end_time']
	max_dom = config['domains']['max_dom']

	start_time_str = start_time.format('YYYY-MM-DD_HH:mm:ss')
	end_time_str = end_time.format('YYYY-MM-DD_HH:mm:ss')

	upp_work_dir = work_root + '/upp'
	if not os.path.isdir(upp_work_dir): cli.error('UPP is not run successfully!')

	met_work_dir = work_root + '/met'
	if not os.path.isdir(met_work_dir): os.makedirs(met_work_dir)
	os.chdir(met_work_dir)

	cli.stage('Prepare observation file.')

	expected_files = ['ob.nc']
	if not check_files(expected_files) or args.force:
		if args.littler_root:
			if 'obs' in config['custom']:
				if 'little_r' in config['custom']['obs']:
					# Render the observation path templates with the start time.
					dir_pattern = config['custom']['obs']['little_r']['dir_pattern']
					file_pattern = config['custom']['obs']['little_r']['file_pattern']
					obs_dir = Template(dir_pattern).render(obs_time=start_time)
					obs_file = Template(file_pattern).render(obs_time=start_time)
					if not os.path.isfile(f'{args.littler_root}/{obs_dir}/{obs_file}'):
						cli.error(f'Observation {args.littler_root}/{obs_dir}/{obs_file} does not exist!')
					run(f'{met_root}/bin/ascii2nc -format little_r {args.littler_root}/{obs_dir}/{obs_file} ob.nc')
		elif args.prepbufr_root:
			# TODO: prepbufr observation support is not implemented yet.
			pass

		# BUG FIX: check_files(('ob.nc')) passed a plain string (parentheses
		# alone do not make a tuple), unlike every other call site which
		# passes a list; pass a list so the filename is checked as one item.
		if not check_files(['ob.nc']):
			cli.error('Failed to prepare netCDF observation file!')
	run(f'ls -l {met_work_dir}/ob.nc')

	cli.stage('Prepare configuration file.')

	# NOTE(review): 'foo' looks like a placeholder sentinel so this branch
	# effectively always runs — confirm intent.
	expected_files = ['foo']
	if not check_files(expected_files) or args.force:
		run(f'cp -f {met_root}/share/met/config/PointStatConfig_default PointStatConfig')

	cli.notice('Succeeded.')
コード例 #9
0
                        '--verbose',
                        help='Print out build log',
                        action='store_true')
    parser.add_argument('-f',
                        '--force',
                        help='Force to rebuild if already built',
                        action='store_true')
    args = parser.parse_args()

    # Resolve the WRF source root: CLI option, then WRF_ROOT env var, then
    # <codes>/WRF.
    # NOTE(review): unlike gsi_root below, a missing wrf_root is not an
    # error here; build_gsi is then called with wrf_root=None — confirm it
    # tolerates that.
    if not args.wrf_root:
        if os.getenv('WRF_ROOT'):
            args.wrf_root = os.getenv('WRF_ROOT')
        elif args.codes:
            args.wrf_root = args.codes + '/WRF'
    if args.wrf_root: args.wrf_root = os.path.abspath(args.wrf_root)

    # Resolve the GSI source root the same way; this one is mandatory and
    # must point at an existing directory.
    if not args.gsi_root:
        if os.getenv('GSI_ROOT'):
            args.gsi_root = os.getenv('GSI_ROOT')
        elif args.codes:
            args.gsi_root = args.codes + '/GSI'
        else:
            cli.error(
                'Option --gsi-root or environment variable GSI_ROOT need to be set!'
            )
    args.gsi_root = os.path.abspath(args.gsi_root)
    if not os.path.isdir(args.gsi_root):
        cli.error(f'Directory {args.gsi_root} does not exist!')

    build_gsi(args.wrf_root, args.gsi_root, args)
コード例 #10
0
def run_wrf(work_root, wrf_root, config, args, wrfda_work_dir=None, tag=None):
	"""Run wrf.exe in <work_root>/wrf[_tag].

	Links the WRFDA analysis files when available for every domain; otherwise
	falls back to the raw real.exe output. Stages the static lookup tables,
	then submits wrf.exe, retrying up to 10 times before giving up.
	Errors out via cli.error on missing prerequisites or exhausted retries.
	"""
	start_time = config['custom']['start_time']
	end_time = config['custom']['end_time']
	datetime_fmt = 'YYYY-MM-DD_HH:mm:ss'
	start_time_str = start_time.format(datetime_fmt)
	end_time_str = end_time.format(datetime_fmt)
	max_dom = config['domains']['max_dom']

	if not wrfda_work_dir:
		if tag is not None:
			wrfda_work_dir = f'{work_root}/wrfda_{tag}'
		else:
			wrfda_work_dir = f'{work_root}/wrfda'
	elif not os.path.isdir(wrfda_work_dir):
		cli.error(f'run_wrf: {wrfda_work_dir} does not exist!')

	if tag is not None:
		wrf_work_dir = f'{work_root}/wrf_{tag}'
	else:
		wrf_work_dir = f'{work_root}/wrf'
	if not os.path.isdir(wrf_work_dir): cli.error(f'run_wrf: {wrf_work_dir} does not exist!')
	os.chdir(wrf_work_dir)

	# Copy the WRFDA analysis for every domain; if any is missing, fall back
	# to the unassimilated real.exe output below.
	all_wrfda_ok = True
	for dom_idx in range(max_dom):
		dom_str = 'd' + str(dom_idx + 1).zfill(2)
		if not copy_wrfda_output(dom_str, start_time_str, wrfda_work_dir):
			all_wrfda_ok = False
			break
	if not all_wrfda_ok:
		cli.warning('Do not use data assimilation.')
		expected_files = ['wrfinput_d{:02d}_{}'.format(i + 1, start_time_str) for i in range(max_dom)]
		expected_files.append(f'wrfbdy_d01_{start_time_str}')
		if not check_files(expected_files):
			cli.error('real.exe wasn\'t executed successfully!')
		for i in range(max_dom):
			run('ln -sf wrfinput_d{0:02d}_{1} wrfinput_d{0:02d}'.format(i + 1, start_time_str))
		run(f'ln -sf wrfbdy_d01_{start_time_str} wrfbdy_d01')

	cli.stage(f'Run wrf.exe at {wrf_work_dir} ...')
	expected_files = ['wrfout_d{:02d}_{}'.format(i + 1, end_time_str) for i in range(max_dom)]
	if not check_files(expected_files) or args.force:
		run('rm -f wrfout_*')
		run(f'ln -sf {wrf_root}/run/LANDUSE.TBL .')
		run(f'ln -sf {wrf_root}/run/ozone_plev.formatted .')
		run(f'ln -sf {wrf_root}/run/ozone_lat.formatted .')
		run(f'ln -sf {wrf_root}/run/ozone.formatted .')
		run(f'ln -sf {wrf_root}/run/RRTM_DATA_DBL RRTM_DATA')
		run(f'ln -sf {wrf_root}/run/RRTMG_LW_DATA .')
		run(f'ln -sf {wrf_root}/run/RRTMG_SW_DATA .')
		run(f'ln -sf {wrf_root}/run/VEGPARM.TBL .')
		run(f'ln -sf {wrf_root}/run/SOILPARM.TBL .')
		run(f'ln -sf {wrf_root}/run/GENPARM.TBL .')
		retries = 0
		while True:
			submit_job(f'{wrf_root}/run/wrf.exe', args.np, config, args, wait=True)
			if not check_files(expected_files):
				# BUG FIX: this used to test `retries == 0`, which aborted on
				# the very first failure and made the retry loop (and the
				# "retry it!" warning) dead code; use the same 10-attempt
				# limit as run_wrfplus_ad.
				if retries == 10:
					cli.error(f'Failed! Check output {os.path.abspath(wrf_work_dir)}/rsl.error.0000.')
				retries = retries + 1
				cli.warning(f'Failed to run wrf, retry it! {retries}')
			else:
				break
		cli.notice('Succeeded.')
	else:
		cli.notice('File wrfout_* already exist.')
	run(f'ls -l {wrf_work_dir}/wrfout_*')
コード例 #11
0
    parser.add_argument('-j',
                        '--config-json',
                        dest='config_json',
                        help='Configuration JSON file.')
    parser.add_argument('-f',
                        '--force',
                        help='Force to run',
                        action='store_true')
    args = parser.parse_args()

    # Resolve the work root: CLI option first, then WORK_ROOT env var; it is
    # mandatory and must point at an existing directory.
    if not args.work_root:
        if os.getenv('WORK_ROOT'):
            args.work_root = os.getenv('WORK_ROOT')
        else:
            cli.error(
                'Option --work-root or environment variable WORK_ROOT need to be set!'
            )
    args.work_root = os.path.abspath(args.work_root)
    if not os.path.isdir(args.work_root):
        cli.error(f'Directory {args.work_root} does not exist!')

    # Resolve the WRF source root: CLI option, then WRF_ROOT env var, then
    # <codes>/WRF; also mandatory.
    if not args.wrf_root:
        if os.getenv('WRF_ROOT'):
            args.wrf_root = os.getenv('WRF_ROOT')
        elif args.codes:
            args.wrf_root = args.codes + '/WRF'
        else:
            cli.error(
                'Option --wrf-root or environment variable WRF_ROOT need to be set!'
            )
    args.wrf_root = os.path.abspath(args.wrf_root)
コード例 #12
0
def run_real(work_root, wps_work_dir, wrf_root, config, args, tag=None):
    """Run real.exe in <work_root>/wrf[_tag] using metgrid output.

    Adjusts namelist.input to match the met_em files (vertical levels, soil
    levels, land-use categories), submits real.exe in parallel, and retries
    in serial per domain when wrfinput files are missing. Errors out via
    cli.error when input files cannot be opened or output is still missing.
    """
    start_time = config['custom']['start_time']
    datetime_fmt = 'YYYY-MM-DD_HH:mm:ss'
    start_time_str = start_time.format(datetime_fmt)
    max_dom = config['domains']['max_dom']

    if not os.path.isdir(wps_work_dir):
        cli.error(f'WPS work directory {wps_work_dir} does not exist!')
    if tag is not None:
        wrf_work_dir = f'{work_root}/wrf_{tag}'
    else:
        wrf_work_dir = f'{work_root}/wrf'
    if not os.path.isdir(wrf_work_dir): os.mkdir(wrf_work_dir)
    os.chdir(wrf_work_dir)

    cli.stage(f'Run real.exe at {wrf_work_dir} ...')
    expected_files = [
        'wrfinput_d{:02d}_{}'.format(i + 1, start_time_str)
        for i in range(max_dom)
    ]
    expected_files.append('wrfbdy_d01')
    if not check_files(expected_files) or args.force:
        run('rm -f wrfinput_* met_em.*.nc')
        run(f'ln -sf {wps_work_dir}/met_em.*.nc .')
        try:
            dataset = Dataset(glob('met_em.*.nc')[0])
        except Exception:
            # glob may match nothing (IndexError) or the file may be
            # unreadable; narrowed from a bare `except:` so KeyboardInterrupt
            # and SystemExit still propagate.
            cli.error('Failed to open one of met_em.*.nc file!')
        # Check met_em file.
        if not 'num_st_layers' in dataset.dimensions or dataset.dimensions[
                'num_st_layers'].size == 0:
            cli.error(
                'Failed to run ungrib and metgrid due to num_metgrid_soil_levels is zero!'
            )
        # Mirror the met_em dimensions/attributes into namelist.input so
        # real.exe agrees with the data it is fed.
        namelist_input = f90nml.read('./namelist.input')
        namelist_input['domains']['num_metgrid_levels'] = dataset.dimensions[
            'num_metgrid_levels'].size
        namelist_input['physics']['num_land_cat'] = dataset.getncattr(
            'NUM_LAND_CAT')
        if 'num_st_layers' in dataset.dimensions:
            namelist_input['domains'][
                'num_metgrid_soil_levels'] = dataset.dimensions[
                    'num_st_layers'].size
        else:
            cli.warning(
                f'Dimension num_st_layers is not in {dataset.filepath()}! Set num_metgrid_soil_levels to 0.'
            )
            namelist_input['domains']['num_metgrid_soil_levels'] = 0
        dataset.close()
        namelist_input.write('./namelist.input', force=True)
        submit_job(f'{wrf_root}/run/real.exe',
                   args.np,
                   config,
                   args,
                   wait=True)
        for i in range(max_dom):
            if not os.path.isfile('wrfinput_d{0:02d}'.format(i + 1)):
                # Check if the failure is caused by parallel computing?
                cli.warning(
                    'Failed to run real.exe in parallel. Try to run in serial.'
                )
                submit_job(f'{wrf_root}/run/real.exe',
                           1,
                           config,
                           args,
                           wait=True)
                if not os.path.isfile('wrfinput_d{0:02d}'.format(i + 1)):
                    # BUG FIX: this message was an f-string, so '{0:02d}'
                    # formatted the literal integer 0 (always "wrfinput_d00")
                    # and the trailing .format(i + 1) had no placeholder left
                    # to fill; use .format for both fields instead.
                    cli.error(
                        'Still failed to generate wrfinput_d{:02d}! See {}/rsl.error.0000.'
                        .format(i + 1, wrf_work_dir))
            run('ln -sf wrfinput_d{0:02d} wrfinput_d{0:02d}_{1}'.format(
                i + 1, start_time_str))
        if os.path.isfile('wrfbdy_d01'):
            run(f'ln -sf wrfbdy_d01 wrfbdy_d01_{start_time_str}')
        cli.notice('Succeeded.')
    else:
        run('ls -l wrfinput_* wrfbdy_*')
        cli.notice('File wrfinput_* already exist.')
コード例 #13
0
import argparse
import ftplib
import math
import pendulum
import re
import os
import pygrib
import shutil
from glob import glob
import sys
sys.path.append(f'{os.path.dirname(os.path.realpath(__file__))}/../utils')
from utils import cli, ftp_exist, ftp_list, ftp_get

# Fail fast when the CIMISS FTP credentials are not configured; all three
# environment variables are required to connect to the server.
if not os.getenv('CIMISS_FTP_HOST'):
	cli.error('CIMISS_FTP_HOST is not set!')
if not os.getenv('CIMISS_FTP_USER'):
	cli.error('CIMISS_FTP_USER is not set!')
if not os.getenv('CIMISS_FTP_PASSWD'):
	cli.error('CIMISS_FTP_PASSWD is not set!')

# Data cycles arrive every 6 hours.
time_interval = pendulum.Duration(hours=6)

def parse_datetime(string):
	"""Parse a YYYYMMDDHH or YYYYMMDDHHmm string into a pendulum datetime.

	Ten digits are read as date-plus-hour; two optional trailing digits add
	the minutes.
	"""
	match = re.match(r'(\d{4}\d{2}\d{2}\d{2})(\d{2})?', string)
	fmt = 'YYYYMMDDHHmm' if match.group(2) else 'YYYYMMDDHH'
	return pendulum.from_format(string, fmt)

def parse_datetime_range(string):
コード例 #14
0
def build_wrf(wrf_root, wps_root, wrfplus_root, wrfda_root, args):
	if not 'HDF5' in os.environ:
		res = subprocess.run(['which', 'h5dump'], stdout=subprocess.PIPE)
		if res.returncode == 0:
			os.environ['HDF5'] = os.path.dirname(os.path.dirname(res.stdout.decode('utf-8')))
			cli.notice(f'Set HDF5 to {os.environ["HDF5"]}')
	if not 'HDF5' in os.environ:
		cli.warning('HDF5 environment variable is not set')

	if not 'NETCDF' in os.environ:
		res = subprocess.run(['which', 'nf-config'], stdout=subprocess.PIPE)
		if res.returncode == 0:
			os.environ['NETCDF'] = os.path.dirname(os.path.dirname(res.stdout.decode('utf-8')))
			res = subprocess.run(['nf-config', '--includedir'], stdout=subprocess.PIPE)
			os.environ['NETCDF_INC'] = res.stdout.decode('utf-8').strip()
			res = subprocess.run(['nf-config', '--flibs'], stdout=subprocess.PIPE)
			os.environ['NETCDF_LIB'] = re.search(r'-L([^ ]*)', res.stdout.decode('utf-8'))[1]
			cli.notice(f'Set NETCDF_INC to {os.environ["NETCDF_INC"]}')
			cli.notice(f'Set NETCDF_LIB to {os.environ["NETCDF_LIB"]}')
	if not 'NETCDF' in os.environ:
		cli.warning('NETCDF environment variable is not set!')

	if not 'JASPERINC' in os.environ or not 'JASPERLIB' in os.environ:
		if 'JASPER_ROOT' in os.environ:
			os.environ['JASPERINC'] = os.environ['JASPER_ROOT'] + '/include'
			os.environ['JASPERLIB'] = os.environ['JASPER_ROOT'] + '/lib'
			cli.notice(f'Set JASPERINC to {os.environ["JASPERINC"]}.')
			cli.notice(f'Set JASPERLIB to {os.environ["JASPERLIB"]}.')
		else:
			cli.error('JASPERINC and JASPERLIB environment variables are not set!')

	if not 'LIBPNG_ROOT' in os.environ:
		cli.warning('LIBPNG_ROOT environment variable is not set. Library PNG may not be found!')

	if not 'WRFIO_NCD_LARGE_FILE_SUPPORT' in os.environ:
		os.environ['WRFIO_NCD_LARGE_FILE_SUPPORT'] = '1'
		cli.notice('Set WRFIO_NCD_LARGE_FILE_SUPPORT to 1.')

	if args.rttov:
		os.environ['RTTOV'] = args.rttov
		cli.notice(f'Use RTTOV in {args.rttov}.')

	# ---------------------------------------------------------------------------------
	#                                    WRF
	os.chdir(wrf_root)
	version = wrf_version(wrf_root)
	if version <= Version('3.6.1'):
		os.environ['BUFR'] = '1'
	# Fix possible code bugs.
	if Version('3.6.1') <= version <= Version('3.8.1'):
		edit_file('phys/module_cu_g3.F', [['integer,  dimension \(12\) :: seed', 'integer,  dimension (33) :: seed']])
	if args.force: run('./clean -a 1> /dev/null 2>&1')
	expected_exe_files = ('main/wrf.exe', 'main/real.exe', 'main/ndown.exe', 'main/tc.exe')
	if not check_files(expected_exe_files):
		cli.notice('Configure WRF ...')
		if args.use_grib:
			cli.notice('Set GRIB2 flag.')
			edit_file('./arch/Config.pl', [
				['\$I_really_want_to_output_grib2_from_WRF = "FALSE"', '$I_really_want_to_output_grib2_from_WRF = "TRUE"']
			])
		if args.use_hyb:
			child = pexpect.spawn('./configure -hyb', encoding='utf-8')
		else:
			child = pexpect.spawn('./configure', encoding='utf-8')
		child.expect('Enter selection.*')
		if platform.system() == 'Darwin':
			if args.compiler_suite == 'gnu':
				child.sendline('15')
		else:
			if args.compiler_suite == 'intel':
				if args.openmp:
					child.sendline('16') # INTEL (ifort/icc) dm+sm
				else:
					child.sendline('15') # INTEL (ifort/icc) dmpar
			elif args.compiler_suite == 'gnu':
				if args.openmp:
					child.sendline('35') # GNU (gfortran/gcc) dm+sm
				else:
					child.sendline('34') # GNU (gfortran/gcc) dmpar
			elif args.compiler_suite == 'pgi':
				if args.openmp:
					child.sendline('55') # PGI (pgf90/pgcc) dm+sm
				else:
					child.sendline('54') # PGI (pgf90/pgcc) dmpar
		child.expect('Compile for nesting.*:')
		child.sendline('1')
		if platform.system() == 'Darwin': child.expect('This build of WRF will use NETCDF4 with HDF5 compression')
		child.wait()

		if args.compiler_suite == 'intel':
			edit_file('./configure.wrf', [
				['mpif90', 'mpiifort'],
				['mpicc', 'mpiicc']
			])
		elif args.compiler_suite == 'pgi':
			edit_file('./configure.wrf', [
				['pgf90', 'pgfortran'],
				['mpif90', 'mpifort']
			])

		# Fix for OpenMPI.
		edit_file('./configure.wrf', [
			['DM_CC\s*=\s*mpicc\s*$', 'DM_CC = mpicc -DMPI2_SUPPORT\n']
		])

		cli.notice('Compile WRF ...')
		if args.debug:
			if args.compiler_suite == 'intel':
				debug_options = '-O0 -g -traceback'
			elif args.compiler_suite == 'gnu':
				debug_options = '-O0 -g -fbacktrace'
			edit_file('configure.wrf', [
				['FCFLAGS\s*=\s*\$\(FCOPTIM\)\s*\$\(FCBASEOPTS\)', f'FCFLAGS = {debug_options} $(FCBASEOPTS)']
			])
		if args.verbose:
			run(f'./compile em_real')
		else:
			run(f'./compile em_real 1> compile.out 2>&1')
		
		if check_files(expected_exe_files):
			cli.notice('Succeeded.')
		else:
			if args.verbose:
				cli.error('Failed!')
			else:
				cli.error(f'Failed! Check {wrf_root}/compile.out')
	else:
		cli.notice('WRF is already built.')

	# ---------------------------------------------------------------------------------
	#                                    WPS
	os.chdir(wps_root)
	if args.force: run('./clean -a 1> /dev/null 2>&1')
	expected_exe_files = ('geogrid/src/geogrid.exe', 'metgrid/src/metgrid.exe', 'ungrib/src/ungrib.exe')
	if not check_files(expected_exe_files):
		cli.notice('Configure WPS ...')
		child = pexpect.spawn('./configure')
		child.expect('Enter selection.*')
		if args.compiler_suite == 'intel':
			child.sendline('19') # Linux x86_64, Intel compiler    (dmpar)
		elif args.compiler_suite == 'gnu':
			child.sendline('3')  # Linux x86_64, gfortran    (dmpar)
		elif args.compiler_suite == 'pgi':
			child.sendline('7')
		child.wait()

		if args.compiler_suite == 'intel':
			edit_file('./configure.wps', [
				['mpif90', 'mpiifort'],
				['mpicc', 'mpiicc']
			])
		elif args.compiler_suite == 'pgi':
			edit_file('./configure.wps', [
				['pgf90', 'pgfortran'],
				['mpif90', 'mpifort']
			])
		else:
			run('sed -i "s/mpicc -cc=.*/mpicc/" configure.wps')
			run('sed -i "s/mpif90 -f90=.*/mpif90/" configure.wps')

		run('sed -i "s/WRF_DIR\s*=.*/WRF_DIR = ..\/WRF/" configure.wps')
		if 'LIBPNG_ROOT' in os.environ:
			run(f'sed -i "s@COMPRESSION_LIBS\s*=\(.*\)@COMPRESSION_LIBS = \\1 -L{os.environ["LIBPNG_ROOT"]}/lib@" configure.wps')
			run(f'sed -i "s@COMPRESSION_INC\s*=\(.*\)@COMPRESSION_INC = \\1 -I{os.environ["LIBPNG_ROOT"]}/include@" configure.wps')

		if args.compiler_suite == 'gnu':
			# Fix for gfortran 9.1.0.
			edit_file('ungrib/src/ngl/g2/intmath.f', [['iand\(i,i-1\)/=0', 'iand(i,i-1_8)/=0']], return_on_first_match=True)
			edit_file('ungrib/src/ngl/g2/intmath.f', [['iand\(i,i-1\)/=0', 'iand(i,i-1_4)/=0']], return_on_first_match=True)
			edit_file('ungrib/src/ngl/g2/intmath.f', [['iand\(i,i-1\)/=0', 'iand(i,i-1_2)/=0']], return_on_first_match=True)
			edit_file('ungrib/src/ngl/g2/intmath.f', [['iand\(i,i-1\)/=0', 'iand(i,i-1_1)/=0']], return_on_first_match=True)

		# Fix for OpenMPI.
		edit_file('./configure.wps', [
			['DM_CC\s*=\s*mpicc\s*$', 'DM_CC = mpicc -DMPI2_SUPPORT\n']
		])

		cli.notice('Compile WPS ...')
		if args.verbose:
			run('./compile')
		else:
			run('./compile 1> compile.out 2>&1')

		if check_files(expected_exe_files):
			cli.notice('Succeeded.')
		else:
			if args.verbose:
				cli.error('Failed!')
			else:
				cli.error(f'Failed! Check {wps_root}/compile.out')
	else:
		cli.notice('WPS is already built.')

	# ---------------------------------------------------------------------------------
	#                                    WRFPLUS
	os.chdir(wrfplus_root)
	if args.force: run('./clean -a 1> /dev/null 2>&1')
	if Version('3.6.1') <= version <= Version('3.9.1'):
		edit_file('phys/module_cu_g3.F', [['integer,  dimension \(12\) :: seed', 'integer,  dimension (33) :: seed']])
		if version == Version('3.6.1'):
			line_number = 841
		elif version == Version('3.8.1'):
			line_number = 855
		elif version == Version('3.9.1'):
			line_number = 856
		else:
			error('Find out the wrong OpenMP directive in WRFPLUS/main/module_wrf_top.F!')
		edit_file('main/module_wrf_top.F', [[line_number, '   !$OMP DEFAULT (SHARED) PRIVATE ( ij )\n']])
	if version >= Version('4.0'):
		expected_exe_files = ('main/wrfplus.exe')
	else:
		expected_exe_files = ('main/wrf.exe')
	if not check_files(expected_exe_files):
		cli.notice('Configure WRFPLUS ...')
		if args.use_grib:
			cli.notice('Set GRIB2 flag.')
			edit_file('./arch/Config.pl', [
				['\$I_really_want_to_output_grib2_from_WRF = "FALSE"', '$I_really_want_to_output_grib2_from_WRF = "TRUE"']
			])
		child = pexpect.spawn('./configure wrfplus')
		child.expect('Enter selection.*')
		if args.compiler_suite == 'intel':
			if version <= Version('3.6.1'):
				child.sendline('8')
			else:
				child.sendline('34')
		elif args.compiler_suite == 'gnu':
			child.sendline('18')
		elif args.compiler_suite == 'pgi':
			child.sendline('28')
		child.wait()

		if args.compiler_suite == 'intel':
			edit_file('./configure.wrf', [
				['mpif90', 'mpiifort'],
				['mpicc', 'mpiicc'],
				['override-limits', 'qoverride-limits']
			])

		# Fix for OpenMPI.
		edit_file('./configure.wrf', [
			['DM_CC\s*=\s*mpicc\s*$', 'DM_CC = mpicc -DMPI2_SUPPORT\n']
		])

		cli.notice('Compile WRFPLUS ...')
		if args.debug:
			if args.compiler_suite == 'intel':
				debug_options = '-O0 -g -traceback'
			elif args.compiler_suite == 'gnu':
				debug_options = '-O0 -g -fbacktrace'
			edit_file('configure.wrf', [
				['FCFLAGS\s*=\s*\$\(FCOPTIM\)\s*\$\(FCBASEOPTS\)', f'FCFLAGS = {debug_options} $(FCBASEOPTS)']
			])
		if version >= Version('4.0'):
			build_target = 'wrfplus'
		else:
			build_target = 'wrf'
		if args.verbose:
			run(f'./compile {build_target}')
		else:
			run(f'./compile {build_target} 1> compile.out 2>&1')

		if check_files(expected_exe_files):
			cli.notice('Succeeded.')
		else:
			if args.verbose:
				cli.error('Failed!')
			else:
				cli.error(f'Failed! Check {wrfplus_root}/compile.out')
	else:
		cli.notice('WRFPLUS is already built.')

	# ---------------------------------------------------------------------------------
	#                                    WRFDA
	os.chdir(wrfda_root)
	os.environ['WRFPLUS_DIR'] = wrfplus_root
	if args.force: run('./clean -a 1> /dev/null 2>&1')
	if Version('3.6.1') <= version <= Version('3.9.1'):
		cli.warning(f'Fix {wrfda_root}/var/da/da_define_structures/da_zero_y.inc')
		edit_file('var/da/da_define_structures/da_zero_y.inc', [
			[', value \)', ', value_ )'],
			[':: value$', ':: value_\nreal value'],
			['if \(.not.\(present\(value\)\)\) value = 0.0', '''
   if (.not.(present(value_))) then
      value = 0.0
   else
      value = value_
   end if
''']
		])
	if version == Version('4.1.1'):
		cli.warning(f'Fix {wrfda_root}/share/input_wrf.F')
		edit_file('share/input_wrf.F', [
			['FUNCTION check_which_switch', 'FUNCTION check_which_switch1']
		])
	expected_exe_files = [
		'var/build/da_advance_time.exe',
		'var/build/da_bias_airmass.exe',
		'var/build/da_bias_scan.exe',
		'var/build/da_bias_sele.exe',
		'var/build/da_bias_verif.exe',
		'var/build/da_rad_diags.exe',
		'var/build/da_tune_obs_desroziers.exe',
		'var/build/da_tune_obs_hollingsworth1.exe',
		'var/build/da_tune_obs_hollingsworth2.exe',
		'var/build/da_update_bc_ad.exe',
		'var/build/da_update_bc.exe',
		'var/build/da_verif_grid.exe',
		'var/build/da_verif_obs.exe',
		'var/build/da_wrfvar.exe',
		'var/build/gen_be_addmean.exe',
		'var/build/gen_be_cov2d3d_contrib.exe',
		'var/build/gen_be_cov2d.exe',
		'var/build/gen_be_cov3d2d_contrib.exe',
		'var/build/gen_be_cov3d3d_bin3d_contrib.exe',
		'var/build/gen_be_cov3d3d_contrib.exe',
		'var/build/gen_be_cov3d.exe',
		'var/build/gen_be_diags.exe',
		'var/build/gen_be_diags_read.exe',
		'var/build/gen_be_ensmean.exe',
		'var/build/gen_be_ensrf.exe',
		'var/build/gen_be_ep1.exe',
		'var/build/gen_be_ep2.exe',
		'var/build/gen_be_etkf.exe',
		'var/build/gen_be_hist.exe',
		'var/build/gen_be_stage0_gsi.exe',
		'var/build/gen_be_stage0_wrf.exe',
		'var/build/gen_be_stage1_1dvar.exe',
		'var/build/gen_be_stage1.exe',
		'var/build/gen_be_stage1_gsi.exe',
		'var/build/gen_be_stage2_1dvar.exe',
		'var/build/gen_be_stage2a.exe',
		'var/build/gen_be_stage2.exe',
		'var/build/gen_be_stage2_gsi.exe',
		'var/build/gen_be_stage3.exe',
		'var/build/gen_be_stage4_global.exe',
		'var/build/gen_be_stage4_regional.exe',
		'var/build/gen_be_vertloc.exe',
		'var/build/gen_mbe_stage2.exe',
		'var/obsproc/src/obsproc.exe']
	if not check_files(expected_exe_files):
		cli.notice('Configure WRFDA ...')
		if args.use_grib:
			cli.notice('Set GRIB2 flag.')
			edit_file('./arch/Config.pl', [
				['\$I_really_want_to_output_grib2_from_WRF = "FALSE"', '$I_really_want_to_output_grib2_from_WRF = "TRUE"']
			])
		child = pexpect.spawn('./configure 4dvar')
		child.expect('Enter selection.*')
		if args.compiler_suite == 'intel':
			child.sendline('8')
		elif args.compiler_suite == 'gnu':
			child.sendline('18')
		elif args.compiler_suite == 'pgi':
			child.sendline('28')
		child.wait()

		if args.compiler_suite == 'intel':
			edit_file('./configure.wrf', [
				['mpif90', 'mpiifort'],
				['mpicc', 'mpiicc']
			])

		# Fix for OpenMPI.
		edit_file('./configure.wrf', [
			['DM_CC\s*=\s*mpicc\s*$', 'DM_CC = mpicc -DMPI2_SUPPORT\n']
		])

		cli.notice('Compile WRFDA ...')
		if args.debug:
			if args.compiler_suite == 'intel':
				debug_options = '-O0 -g -traceback'
			elif args.compiler_suite == 'gnu':
				debug_options = '-O0 -g -fbacktrace'
			edit_file('configure.wrf', [
				['FCFLAGS\s*=\s*\$\(FCOPTIM\)\s*\$\(FCBASEOPTS\)', f'FCFLAGS = {debug_options} $(FCBASEOPTS)']
			])
		if args.verbose:
			run(f'./compile all_wrfvar')
		else:
			run(f'./compile all_wrfvar 1> compile.out 2>&1')

		if check_files(expected_exe_files, fatal=True):
			cli.notice('Succeeded.')
		else:
			if args.verbose:
				cli.error('Failed!')
			else:
				cli.error(f'Failed! Check {wrfda_root}/compile.out')
	else:
		cli.notice('WRFDA is already built.')
コード例 #15
0
	cli.notice('Succeeded.')

if __name__ == '__main__':
	parser = argparse.ArgumentParser(description="Configure WRF model.\n\nLongrun Weather Inc., NWP operation software.\nCopyright (C) 2018 - All Rights Reserved.", formatter_class=argparse.RawTextHelpFormatter)
	parser.add_argument('-c', '--codes', help='Root directory of all codes (e.g. WRF, WPS)')
	parser.add_argument(      '--upp-root', dest='upp_root', help='UPP root directory (e.g. UPP)')
	parser.add_argument('-w', '--work-root',  dest='work_root', help='Work root directory')
	parser.add_argument('-j', '--config-json', dest='config_json', help='Configuration JSON file.')
	parser.add_argument('-f', '--force', help='Force to run', action='store_true')
	args = parser.parse_args()

	# Resolve the work root: command line option first, then the WORK_ROOT
	# environment variable. Abort when neither is available.
	if not args.work_root:
		work_root_env = os.getenv('WORK_ROOT')
		if work_root_env:
			args.work_root = work_root_env
		else:
			cli.error('Option --work-root or environment variable WORK_ROOT need to be set!')
	args.work_root = os.path.abspath(args.work_root)
	if not os.path.isdir(args.work_root):
		os.makedirs(args.work_root)
		cli.notice(f'Create work directory {args.work_root}.')

	# Resolve the UPP root: option, then UPP_ROOT, then <codes>/UPP.
	# Unlike the work root, this directory must already exist.
	if not args.upp_root:
		upp_root_env = os.getenv('UPP_ROOT')
		if upp_root_env:
			args.upp_root = upp_root_env
		elif args.codes:
			args.upp_root = args.codes + '/UPP'
		else:
			cli.error('Option --upp-root or environment variable UPP_ROOT need to be set!')
	args.upp_root = os.path.abspath(args.upp_root)
	if not os.path.isdir(args.upp_root):
		cli.error(f'Directory {args.upp_root} does not exist!')
コード例 #16
0
def run_wrfda_obsproc(work_root,
                      wrfda_root,
                      littler_root,
                      config,
                      args,
                      wrf_work_dir=None,
                      tag=None):
    """Run WRFDA's obsproc.exe to prepare observations for 3DVar.

    Creates <work_root>/wrfda[_<tag>]/obsproc, fills namelist.obsproc from
    WRFDA's 3DVar template with the d01 projection/extent, links the LITTLE_R
    observation file and submits obsproc.exe.

    Parameters
    ----------
    work_root : str
        Root of the working directories.
    wrfda_root : str
        WRFDA installation root (provides obsproc.exe, obserr.txt, templates).
    littler_root : str
        Root directory holding LITTLE_R observation files.
    config : dict
        Configuration tree; custom->start_time is a date-time object with
        format()/add()/subtract() methods (presumably pendulum -- confirm).
    args : argparse.Namespace
        Command line options; args.force re-runs obsproc even if output exists.
    wrf_work_dir : str, optional
        WRF working directory; derived from work_root/tag when omitted.
    tag : str, optional
        Suffix distinguishing parallel experiment directories.
    """
    start_time = config['custom']['start_time']
    datetime_fmt = 'YYYY-MM-DD_HH:mm:ss'
    start_time_str = start_time.format(datetime_fmt)

    # Derive the WRF work directory from the experiment tag when not given.
    if not wrf_work_dir:
        if tag != None:
            wrf_work_dir = f'{work_root}/wrf_{tag}'
        else:
            wrf_work_dir = f'{work_root}/wrf'

    if tag != None:
        wrfda_work_dir = f'{work_root}/wrfda_{tag}/obsproc'
    else:
        wrfda_work_dir = f'{work_root}/wrfda/obsproc'
    if not os.path.isdir(wrfda_work_dir): os.mkdir(wrfda_work_dir)
    os.chdir(wrfda_work_dir)

    cli.notice('Use builtin obserr.')
    run(f'ln -sf {wrfda_root}/var/obsproc/obserr.txt {wrfda_work_dir}')

    # Use d01 domain extent.
    # Prefer projection attributes from the actual wrfinput file; fall back
    # to the geogrid section of the config when the file is not there yet.
    if check_files([f'{wrf_work_dir}/wrfinput_d01_{start_time_str}']):
        ncfile = Dataset(f'{wrf_work_dir}/wrfinput_d01_{start_time_str}', 'r')
        iproj = ncfile.getncattr('MAP_PROJ')
        phic = ncfile.getncattr('CEN_LAT')
        xlonc = ncfile.getncattr('CEN_LON')
        moad_cen_lat = ncfile.getncattr('MOAD_CEN_LAT')
        standard_lon = ncfile.getncattr('STAND_LON')
        ncfile.close()
    else:
        iproj = config['geogrid']['map_proj']
        phic = config['geogrid']['ref_lat']
        xlonc = config['geogrid']['ref_lon']
        moad_cen_lat = config['geogrid']['ref_lat']
        standard_lon = config['geogrid']['ref_lon']
    # NOTE(review): iproj is collected but never written to the namelist below.

    # Defaults: OUTPUT_OB_FORMAT=2 and a 360-minute assimilation window.
    output_format = get_value(config, ['custom', 'obsproc', 'output_format'],
                              default=2)
    time_window = get_value(config, ['custom', 'wrfda', 'time_window'],
                            default=360)

    # Only 3DVar is supported; any other custom->da->type aborts.
    if has_key(config, ('custom', 'da', 'type')):
        if config['custom']['da']['type'] == '3dvar':
            namelist_obsproc = f90nml.read(
                f'{wrfda_root}/var/obsproc/namelist.obsproc.3dvar.wrfvar-tut')
        else:
            cli.error('Currently, we only support 3DVar...')
    else:
        namelist_obsproc = f90nml.read(
            f'{wrfda_root}/var/obsproc/namelist.obsproc.3dvar.wrfvar-tut')

    # Center the assimilation time window on the analysis time.
    namelist_obsproc['record1'][
        'obs_gts_filename'] = f'obs.gts.{start_time.format("YYYYMMDDHHmm")}'
    namelist_obsproc['record2']['time_window_min'] = start_time.subtract(
        minutes=time_window / 2).format('YYYY-MM-DD_HH:mm:ss')
    namelist_obsproc['record2']['time_analysis'] = start_time.format(
        'YYYY-MM-DD_HH:mm:ss')
    namelist_obsproc['record2']['time_window_max'] = start_time.add(
        minutes=time_window / 2).format('YYYY-MM-DD_HH:mm:ss')
    namelist_obsproc['record3']['max_number_of_obs'] = 1200000
    namelist_obsproc['record7']['PHIC'] = phic
    namelist_obsproc['record7']['XLONC'] = xlonc
    namelist_obsproc['record7']['MOAD_CEN_LAT'] = moad_cen_lat
    namelist_obsproc['record7']['STANDARD_LON'] = standard_lon
    namelist_obsproc['record8']['NESTIX'] = config['geogrid']['e_sn']
    namelist_obsproc['record8']['NESTJX'] = config['geogrid']['e_we']
    namelist_obsproc['record8']['DIS'] = config['geogrid']['dx']
    namelist_obsproc['record9']['OUTPUT_OB_FORMAT'] = output_format
    namelist_obsproc.write('./namelist.obsproc', force=True)

    cli.stage(f'Run obsproc.exe at {wrfda_work_dir} ...')
    expected_files = [
        f'obs_gts_{start_time.format("YYYY-MM-DD_HH:mm:ss")}.3DVAR'
    ]
    if not check_files(expected_files) or args.force:
        run('rm -f obs_gts_*')

        # Locate the LITTLE_R input: either from the dir/file Jinja patterns
        # in the config (rendered with the start time) or the default layout.
        if has_key(config, ('custom', 'littler')):
            if 'dir_pattern' in config['custom'][
                    'littler'] and 'file_pattern' in config['custom'][
                        'littler']:
                dir_name = Template(
                    config['custom']['littler']['dir_pattern']).render(
                        time=start_time)
                file_name = Template(
                    config['custom']['littler']['file_pattern']).render(
                        time=start_time)
                littler_path = f'{littler_root}/{dir_name}/{file_name}'
            else:
                cli.error(
                    'No dir_pattern and file_pattern in custom->littler section!'
                )
        else:
            littler_path = f'{littler_root}/{start_time.format("YYYYMMDD")}/obs.gts.{start_time.format("YYYYMMDDHHmm")}'

        if os.path.exists(littler_path):
            run(f'ln -sf {littler_path} {wrfda_work_dir}/obs.gts.{start_time.format("YYYYMMDDHHmm")}'
                )
        else:
            cli.error(f'Failed! {littler_path} Not Found.')
        submit_job(f'{wrfda_root}/var/obsproc/obsproc.exe',
                   1,
                   config,
                   args,
                   wait=True)
        if not check_files(expected_files):
            cli.error(f'Failed!')
        cli.notice('Succeeded.')
    else:
        cli.notice('File obs_gts_* already exist.')
    run('ls -l obs_gts_*')
コード例 #17
0
                        help='Root directory to store GDAS data.')
    parser.add_argument(
        '-s',
        '--start-time',
        dest='start_time',
        help='Download GDAS data start in this date time (YYYYMMDDHH).',
        type=parse_time)
    parser.add_argument(
        '-e',
        '--end-time',
        dest='end_time',
        help='Download GDAS data end in this date time (YYYYMMDDHH).',
        type=parse_time)
    args = parser.parse_args()

    if not args.output_root:
        if os.getenv('RAWDATA_ROOT'):
            args.output_root = os.getenv('RAWDATA_ROOT') + '/gfs'
        else:
            cli.error(
                'Option --output-root or environment variable RAWDATA_ROOT need to be set!'
            )
    args.output_root = os.path.abspath(args.output_root)
    if not os.path.isdir(args.output_root):
        cli.error(f'Directory {args.output_root} does not exist!')

    if not args.end_time:
        args.end_time = args.start_time

    get_gdas(args.output_root, args.start_time, args.end_time, args)
コード例 #18
0
def _export_tool_prefix(env_name, tool):
    """Point os.environ[env_name] at the install prefix containing bin/<tool>.

    Does nothing when the variable is already set or the tool is not on PATH.
    """
    if env_name not in os.environ:
        res = subprocess.run(['which', tool], stdout=subprocess.PIPE)
        if res.returncode == 0:
            # <prefix>/bin/<tool> -> <prefix>; strip() drops the trailing newline.
            os.environ[env_name] = os.path.dirname(
                os.path.dirname(res.stdout.decode('utf-8').strip()))
            cli.notice(f'Set {env_name} to {os.environ[env_name]}')


def build_nceplibs(nceplibs_root, args):
    """Build NCEPLIBS in nceplibs_root with the compiler suite in args.

    HDF5/NETCDF prefixes are discovered from PATH when unset; JasPer and
    libpng locations are derived from JASPER_ROOT/LIBPNG_ROOT (libpng falls
    back to system paths). Known source problems are patched and
    make_ncep_libs.sh is run non-interactively.
    """
    _export_tool_prefix('HDF5', 'h5dump')
    if 'HDF5' not in os.environ:
        cli.warning('HDF5 environment variable is not set')

    _export_tool_prefix('NETCDF', 'ncdump')
    if 'NETCDF' not in os.environ:
        cli.warning('NETCDF environment variable is not set!')

    if 'JASPER_INC' not in os.environ or 'JASPER_LIB' not in os.environ:
        if 'JASPER_ROOT' in os.environ:
            os.environ['JASPER_INC'] = os.environ['JASPER_ROOT'] + '/include'
            os.environ['JASPER_LIB'] = os.environ['JASPER_ROOT'] + '/lib'
            cli.notice(f'Set JASPER_INC to {os.environ["JASPER_INC"]}.')
            cli.notice(f'Set JASPER_LIB to {os.environ["JASPER_LIB"]}.')
        else:
            # Bug fix: the message used to say JASPERINC/JASPERLIB, but the
            # variables actually checked are JASPER_INC/JASPER_LIB.
            cli.error(
                'JASPER_INC and JASPER_LIB environment variables are not set!')

    if 'PNG_INC' not in os.environ or 'PNG_LIB' not in os.environ:
        if 'LIBPNG_ROOT' in os.environ:
            os.environ['PNG_INC'] = os.environ['LIBPNG_ROOT'] + '/include'
            os.environ['PNG_LIB'] = os.environ['LIBPNG_ROOT'] + '/lib'
        else:
            # Fall back to the usual system install locations.
            os.environ['PNG_INC'] = '/usr/include'
            os.environ['PNG_LIB'] = '/usr/lib64'
        cli.notice(f'Set PNG_INC to {os.environ["PNG_INC"]}.')
        cli.notice(f'Set PNG_LIB to {os.environ["PNG_LIB"]}.')

    os.chdir(nceplibs_root)

    if args.compiler_suite == 'gnu':
        # Fix for gfortran 9.1.0: each edit_file call patches only the first
        # remaining match, so the four iand(i,i-1) occurrences are rewritten
        # in order with integer kinds 8, 4, 2 and 1.
        for kind in (8, 4, 2, 1):
            edit_file('src/g2/v3.1.0/src/intmath.f',
                      [['iand\(i,i-1\)/=0', f'iand(i,i-1_{kind})/=0']],
                      return_on_first_match=True)

    # Answer the interactive "Proceed? (y/n)" prompt automatically.
    edit_file('make_ncep_libs.sh',
              [['read -p "Proceed\? \(y/n\) " yn', 'yn=y']])

    # Install into the directory we were asked to build in; this was
    # args.nceplibs_root before, inconsistent with the nceplibs_root parameter
    # used by os.chdir() above.
    run(f'./make_ncep_libs.sh -s linux -c {args.compiler_suite} -d {nceplibs_root} -o 0 -a upp'
        )
コード例 #19
0
                        dest='np',
                        help='MPI process number to run WRF.',
                        default=2,
                        type=int)
    parser.add_argument('-f',
                        '--force',
                        help='Force to run',
                        action='store_true')
    args = parser.parse_args()

    if not args.work_root:
        if os.getenv('WORK_ROOT'):
            args.work_root = os.getenv('WORK_ROOT')
        else:
            cli.error(
                'Option --work-root or environment variable WORK_ROOT need to be set!'
            )
    args.work_root = os.path.abspath(args.work_root)
    if not os.path.isdir(args.work_root):
        cli.error(f'Directory {args.work_root} does not exist!')

    if not args.wps_root:
        if os.getenv('WPS_ROOT'):
            args.wps_root = os.getenv('WPS_ROOT')
        elif args.codes:
            args.wps_root = args.codes + '/WPS'
        else:
            cli.error(
                'Option --wps-root or environment variable WPS_ROOT need to be set!'
            )
    args.wps_root = os.path.abspath(args.wps_root)
コード例 #20
0
if __name__ == '__main__':
	parser = argparse.ArgumentParser(description="Configure WRF model.\n\nLongrun Weather Inc., NWP operation software.\nCopyright (C) 2018 - All Rights Reserved.", formatter_class=argparse.RawTextHelpFormatter)
	# Bug fix: --codes was missing from this parser although args.codes is
	# read below as the fallback for locating MET, which raised AttributeError
	# whenever MET_ROOT was not set. Keep it consistent with sibling scripts.
	parser.add_argument('-c', '--codes', help='Root directory of all codes (e.g. WRF, WPS)')
	parser.add_argument(      '--met-root', dest='met_root', help='MET root directory (e.g. MET)')
	parser.add_argument('-w', '--work-root',  dest='work_root', help='Work root directory')
	parser.add_argument('-l', '--littler-root', dest='littler_root', help='LITTLE_R data root directory')
	parser.add_argument('-p', '--prepbufr-root', dest='prepbufr_root', help='PrepBUFR data root directory')
	parser.add_argument('-j', '--config-json', dest='config_json', help='Configuration JSON file.')
	parser.add_argument('-f', '--force', help='Force to run', action='store_true')
	args = parser.parse_args()

	# Resolve the work root: option first, then WORK_ROOT environment variable.
	if not args.work_root:
		if os.getenv('WORK_ROOT'):
			args.work_root = os.getenv('WORK_ROOT')
		else:
			cli.error('Option --work-root or environment variable WORK_ROOT need to be set!')
	args.work_root = os.path.abspath(args.work_root)
	if not os.path.isdir(args.work_root):
		os.makedirs(args.work_root)
		cli.notice(f'Create work directory {args.work_root}.')

	# Resolve the MET root: option, then MET_ROOT, then <codes>/MET.
	# This directory must already exist.
	if not args.met_root:
		if os.getenv('MET_ROOT'):
			args.met_root = os.getenv('MET_ROOT')
		elif args.codes:
			args.met_root = args.codes + '/MET'
		else:
			cli.error('Option --met-root or environment variable MET_ROOT need to be set!')
	args.met_root = os.path.abspath(args.met_root)
	if not os.path.isdir(args.met_root):
		cli.error(f'Directory {args.met_root} does not exist!')
コード例 #21
0
def run_wrfda_update_bc(work_root,
                        wrfda_root,
                        update_lowbc,
                        config,
                        args,
                        wrf_work_dir=None,
                        wrfbdy=None,
                        tag=None):
    """Run WRFDA's da_update_bc.exe to refresh wrfbdy with the analysis.

    Links the boundary file and the da_wrfvar analysis into the WRFDA work
    directory, writes parame.in and submits da_update_bc.exe. The updated
    boundary file is copied to wrfbdy_<dom>_<time>.{low,lateral}_updated.

    Parameters
    ----------
    work_root : str
        Root of the working directories.
    wrfda_root : str
        WRFDA installation root (da_update_bc.exe and the parame.in template).
    update_lowbc : bool
        When True, update only the low boundary condition.
    config : dict
        Configuration tree; uses custom->start_time, domains->max_dom and,
        for nested runs, custom->wrfda->dom (0-based domain index).
    args : argparse.Namespace
        Command line options; args.force re-runs even if output exists.
    wrf_work_dir : str, optional
        WRF working directory; derived from work_root/tag when omitted.
    wrfbdy : str, optional
        Path of the wrfbdy file to update; defaults to the one in wrf_work_dir.
    tag : str, optional
        Suffix distinguishing parallel experiment directories.
    """
    start_time = config['custom']['start_time']
    datetime_fmt = 'YYYY-MM-DD_HH:mm:ss'
    start_time_str = start_time.format(datetime_fmt)
    max_dom = config['domains']['max_dom']

    # Derive the WRF work directory from the experiment tag when not given.
    if not wrf_work_dir:
        if tag != None:
            wrf_work_dir = f'{work_root}/wrf_{tag}'
        else:
            wrf_work_dir = f'{work_root}/wrf'

    # For nested runs, operate on the domain selected by custom->wrfda->dom.
    if max_dom > 1:
        dom_str = 'd' + str(config['custom']['wrfda']['dom'] + 1).zfill(2)
        if tag != None:
            wrfda_work_dir = f'{work_root}/wrfda_{tag}/{dom_str}'
        else:
            wrfda_work_dir = f'{work_root}/wrfda/{dom_str}'
    else:
        dom_str = 'd01'
        if tag != None:
            wrfda_work_dir = f'{work_root}/wrfda_{tag}'
        else:
            wrfda_work_dir = f'{work_root}/wrfda'
    if not os.path.isdir(wrfda_work_dir): os.mkdir(wrfda_work_dir)
    os.chdir(wrfda_work_dir)

    if not wrfbdy: wrfbdy = f'{wrf_work_dir}/wrfbdy_{dom_str}'

    cli.stage(f'Run WRFDA update_bc at {wrfda_work_dir} ...')

    # The boundary file, the analysis and the first guess must already exist.
    expected_files = [wrfbdy, f'wrfvar_output_{start_time_str}', 'fg']
    if not check_files(expected_files):
        print(expected_files)
        cli.error(
            'run_wrfda_update_bc: da_wrfvar.exe or real.exe wasn\'t executed successfully!'
        )
    run(f'ln -sf {wrfbdy} wrfbdy_{dom_str}')
    run(f'ln -sf wrfvar_output_{start_time_str} wrfvar_output')

    # Point update_bc at the first guess; optionally restrict it to the
    # low boundary only.
    parame_in = f90nml.read(f'{wrfda_root}/var/test/update_bc/parame.in')
    parame_in['control_param']['wrf_input'] = './fg'
    if update_lowbc:
        cli.notice('Update only low boundary condition.')
        parame_in['control_param']['low_bdy_only'] = True
    parame_in.write(f'{wrfda_work_dir}/parame.in', force=True)

    if update_lowbc:
        expected_file = f'wrfbdy_{dom_str}_{start_time_str}.low_updated'
    else:
        expected_file = f'wrfbdy_{dom_str}_{start_time_str}.lateral_updated'
    # NOTE(review): other call sites pass a list to check_files; confirm it
    # also accepts a bare string here.
    if not check_files(expected_file) or args.force:
        submit_job(f'{wrfda_root}/var/build/da_update_bc.exe',
                   1,
                   config,
                   args,
                   wait=True)
        run(f'cp wrfbdy_{dom_str} {expected_file}')
    else:
        run(f'ls -l {expected_file}')

    cli.notice('Succeeded.')
コード例 #22
0
def config_wrfda(work_root,
                 wrfda_root,
                 config,
                 args,
                 wrf_work_dir=None,
                 tag=None,
                 fg=None):
    """Write namelist.input for a WRFDA run in the WRFDA work directory.

    Builds the namelist from WRFDA's README.namelist template, merges the
    tutorial namelist.input, then overlays time control, domain, physics and
    wrfvar1..22 sections from the config. Grid parameters are written for one
    domain only, since WRFDA processes a single domain at a time.

    Parameters
    ----------
    work_root : str
        Root of the working directories.
    wrfda_root : str
        WRFDA installation root (templates and version information).
    config : dict
        Configuration tree; custom->start_time/end_time are date-time objects
        with format()/add()/subtract() methods (presumably pendulum -- confirm).
    args : argparse.Namespace
        Command line options (not used directly here; kept for interface
        symmetry with the other run/config helpers).
    wrf_work_dir : str, optional
        WRF working directory; derived from work_root/tag when omitted.
    tag : str, optional
        Suffix distinguishing parallel experiment directories.
    fg : str, optional
        First-guess file used when neither wrfinput nor wrfout is found.
    """
    start_time = config['custom']['start_time']
    end_time = config['custom']['end_time']
    datetime_fmt = 'YYYY-MM-DD_HH:mm:ss'
    start_time_str = start_time.format(datetime_fmt)
    max_dom = config['domains']['max_dom']

    # Need to take some parameters from wrfinput file.
    if not wrf_work_dir:
        if tag != None:
            wrf_work_dir = f'{work_root}/wrf_{tag}'
        else:
            wrf_work_dir = f'{work_root}/wrf'

    # For nested runs, custom->wrfda->dom selects the (0-based) domain index.
    if max_dom > 1:
        if not has_key(config, ('custom', 'wrfda', 'dom')):
            cli.error(
                'You need to set custom->wrfda->dom to set which domain to work on!'
            )
        dom_idx = config['custom']['wrfda']['dom']
        dom_str = 'd' + str(dom_idx + 1).zfill(2)
        if tag != None:
            wrfda_work_dir = f'{work_root}/wrfda_{tag}/{dom_str}'
        else:
            wrfda_work_dir = f'{work_root}/wrfda/{dom_str}'
    else:
        dom_idx = 0
        dom_str = 'd01'
        if tag != None:
            wrfda_work_dir = f'{work_root}/wrfda_{tag}'
        else:
            wrfda_work_dir = f'{work_root}/wrfda'
    if not os.path.isdir(wrfda_work_dir): os.makedirs(wrfda_work_dir)
    os.chdir(wrfda_work_dir)

    version = wrf_version(wrfda_root)

    # Pull grid attributes from whichever model file is available:
    # wrfinput, then wrfout, then an explicit first guess.
    if os.path.isfile(f'{wrf_work_dir}/wrfinput_{dom_str}'):
        f = Dataset(f'{wrf_work_dir}/wrfinput_{dom_str}')
    elif os.path.isfile(f'{wrf_work_dir}/wrfout_{dom_str}_{start_time_str}'):
        f = Dataset(f'{wrf_work_dir}/wrfout_{dom_str}_{start_time_str}')
    elif fg:
        f = Dataset(fg)
    else:
        cli.error(
            f'config_wrfda: Cannot find wrfinput or wrfout in {wrf_work_dir} or wrfvar!'
        )
    num_land_cat = f.getncattr('NUM_LAND_CAT')
    hypsometric_opt = f.getncattr('HYPSOMETRIC_OPT')
    f.close()

    time_window = get_value(config, ('custom', 'wrfda', 'time_window'), 360)
    # Read in namelist template (not exact Fortran namelist format, we need to change it).
    template = open(f'{wrfda_root}/var/README.namelist').read()
    template = re.sub(r'^[^&]*', '', template, flags=re.DOTALL)  # drop leading prose
    template = re.sub(r';.*', '', template)                      # drop ';' comments
    template = re.sub(r'\([^\)]*\)', '', template)               # drop '(...)' remarks
    namelist_input = f90nml.read(StringIO(template))
    namelist_input['wrfvar1']['var4d_lbc'] = False
    namelist_input['wrfvar18']['analysis_date'] = start_time_str
    # Center the assimilation time window (minutes) on the analysis time.
    namelist_input['wrfvar21']['time_window_min'] = start_time.subtract(
        minutes=time_window / 2).format(datetime_fmt)
    namelist_input['wrfvar22']['time_window_max'] = start_time.add(
        minutes=time_window / 2).format(datetime_fmt)
    # Fix bugs
    namelist_input['wrfvar2']['qc_rej_both'] = False
    namelist_input['wrfvar14']['rtminit_satid'] = -1
    namelist_input['wrfvar14']['rtminit_sensor'] = -1
    if version == Version('3.6.1'):
        # 3.6.1's template misspells several option names; rename them.
        namelist_input['wrfvar4']['use_iasiobs'] = False
        del namelist_input['wrfvar4']['use_iasisobs']
        namelist_input['wrfvar4']['use_seviriobs'] = False
        del namelist_input['wrfvar4']['use_sevirisobs']
        namelist_input['wrfvar5']['max_omb_spd'] = namelist_input['wrfvar5'][
            'max_omb_sp']
        del namelist_input['wrfvar5']['max_omb_sp']
        namelist_input['wrfvar5']['max_error_spd'] = namelist_input['wrfvar5'][
            'max_error_sp']
        del namelist_input['wrfvar5']['max_error_sp']
    elif version > Version('3.8.1'):
        namelist_input['wrfvar11']['write_detail_grad_fn'] = True
    namelist_input['wrfvar11']['calculate_cg_cost_fn'] = True
    # Merge namelist.input in tutorial.
    tmp = f90nml.read(f'{wrfda_root}/var/test/tutorial/namelist.input')
    for key, value in tmp.items():
        if key not in namelist_input:
            namelist_input[key] = value
    namelist_input['time_control']['run_hours'] = config['custom'][
        'forecast_hours']
    # Expand start/end times into per-domain lists, as WRF namelists expect.
    # Two separate loops keep the original key insertion order
    # (all start_* first, then all end_*).
    for letter, part in (('Y', 'year'), ('M', 'month'), ('D', 'day'),
                         ('H', 'hour')):
        namelist_input['time_control'][f'start_{part}'] = [
            int(start_time.format(letter)) for _ in range(max_dom)
        ]
    for letter, part in (('Y', 'year'), ('M', 'month'), ('D', 'day'),
                         ('H', 'hour')):
        namelist_input['time_control'][f'end_{part}'] = [
            int(end_time.format(letter)) for _ in range(max_dom)
        ]
    namelist_input['time_control']['frames_per_outfile'] = [
        1 for _ in range(max_dom)
    ]
    for key, value in config['time_control'].items():
        namelist_input['time_control'][key] = value
    for key, value in config['domains'].items():
        namelist_input['domains'][key] = value
    # WRFDA only take grids parameters one domain at a time.
    namelist_input['domains']['max_dom'] = 1
    for key in ('e_we', 'e_sn', 'e_vert', 'dx', 'dy', 'grid_id', 'parent_id',
                'i_parent_start', 'j_parent_start', 'parent_grid_ratio',
                'parent_time_step_ratio'):
        if key in config['domains']:
            namelist_input['domains'][key] = config['domains'][key][dom_idx]
    namelist_input['domains']['hypsometric_opt'] = hypsometric_opt
    # Sync physics parameters.
    if 'physics' in config:
        for key, value in config['physics'].items():
            namelist_input['physics'][key] = value
    namelist_input['physics']['num_land_cat'] = num_land_cat
    if version == Version('3.9.1'):
        namelist_input['dynamics']['gwd_opt'] = 0
    # Write customized parameters.
    # Bug fix: the loop variable used to be named "tag", shadowing the
    # function parameter of the same name; renamed to avoid clobbering it.
    for var_idx in range(1, 23):
        section = f'wrfvar{var_idx}'
        for key, value in config[section].items():
            namelist_input[section][key] = value

    # Validate some parameters.
    for key in ('as1', 'as2', 'as3', 'as4', 'as5'):
        if namelist_input['wrfvar7'][key] == -1:
            cli.error(f'wrfvar7->{key} is -1!')

    namelist_input.write(f'{wrfda_work_dir}/namelist.input', force=True)

    cli.notice('Succeeded.')
コード例 #23
0
def build_gsi(wrf_root, gsi_root, args):
    """Build GSI (with EnKF) and its utility tools (bufr_tools, read_diag).

    The wrf_root and gsi_root parameters are kept for interface
    compatibility; the directories actually used are taken from args
    (args.wrf_root, args.gsi_root).

    args must provide: wrf_root, gsi_root, compiler_suite, force, verbose.
    """
    # Check environment: derive HDF5/NETCDF install prefixes from tools on
    # PATH when the environment variables are not already set.
    if not 'HDF5' in os.environ:
        res = subprocess.run(['which', 'h5dump'], stdout=subprocess.PIPE)
        if res.returncode == 0:
            # h5dump lives in <prefix>/bin, so the prefix is two dirnames up.
            os.environ['HDF5'] = os.path.dirname(
                os.path.dirname(res.stdout.decode('utf-8')))
            cli.notice(f'Set HDF5 to {os.environ["HDF5"]}')
    if not 'HDF5' in os.environ:
        cli.warning('HDF5 environment variable is not set')

    if not 'NETCDF' in os.environ:
        res = subprocess.run(['which', 'ncdump'], stdout=subprocess.PIPE)
        if res.returncode == 0:
            os.environ['NETCDF'] = os.path.dirname(
                os.path.dirname(res.stdout.decode('utf-8')))
            cli.notice(f'Set NETCDF to {os.environ["NETCDF"]}')
    if not 'NETCDF' in os.environ:
        cli.warning('NETCDF environment variable is not set!')

    if not os.getenv('LAPACK_PATH') and args.compiler_suite != 'intel':
        cli.error('Shell variable LAPACK_PATH is not set!')

    # Select compiler commands up front. fc is also needed by the
    # bufr_tools and read_diag builds below, which can run even when the
    # main GSI build is skipped, so it must be defined unconditionally
    # (previously it was only set inside the build branch -> NameError).
    if args.compiler_suite == 'gnu':
        cc, cxx, fc = 'gcc', 'g++', 'gfortran'
    elif args.compiler_suite == 'intel':
        cc, cxx, fc = 'mpiicc', 'mpiicpc', 'mpiifort'
    else:
        cli.error(f'Unsupported compiler suite {args.compiler_suite}!')

    version = gsi_version(args.gsi_root)

    if version <= Version('3.6'):
        # 3.7 changes: Added wrf interface as a library (wrflib). No need to compile WRF with GSI and EnKF.
        if not os.path.isdir(args.wrf_root):
            cli.error(f'WRF directory {args.wrf_root} does not exist!')
        os.chdir(args.wrf_root)
        # One-element tuple; a bare string would make check_files iterate
        # over single characters.
        expected_exe_files = ('main/wrf.exe',)
        if not check_files(expected_exe_files):
            cli.error('WRF has not been built! Build it first.')

    os.chdir(args.gsi_root)

    if args.force: run('rm -rf build')
    if not os.path.isdir('build'): os.mkdir('build')
    os.chdir('build')
    if version == Version('3.6'):
        expected_exe_files = ('bin/gsi.x', 'lib/libbacio_v2.0.1.a',
                              'lib/libbufr_v10.2.5.a', 'lib/libcrtm_v2.2.3.a',
                              'lib/libenkfdeplib.a', 'lib/libenkflib.a',
                              'lib/libgsilib_shrd.a', 'lib/libgsilib_wrf.a',
                              'lib/libnemsio_v2.2.1.a',
                              'lib/libsfcio_v1.1.0.a', 'lib/libsigio_v2.0.1.a',
                              'lib/libsp_v2.0.2.a', 'lib/libw3emc_v2.2.0.a',
                              'lib/libw3nco_v2.0.6.a')
    elif version == Version('3.7'):
        expected_exe_files = ('bin/enkf_wrf.x', 'bin/gsi.x',
                              'lib/libbacio_v2.0.1.a', 'lib/libbufr_v10.2.5.a',
                              'lib/libcrtm_v2.2.3.a', 'lib/libenkfdeplib.a',
                              'lib/libenkflib.a', 'lib/libgsilib_shrd.a',
                              'lib/libgsilib_wrf.a', 'lib/libnemsio_v2.2.1.a',
                              'lib/libsfcio_v1.1.0.a', 'lib/libsigio_v2.0.1.a',
                              'lib/libsp_v2.0.2.a', 'lib/libw3emc_v2.2.0.a',
                              'lib/libw3nco_v2.0.6.a')
    else:
        # Without this guard the check_files call below would raise a
        # NameError for any other GSI version.
        cli.error(f'Unsupported GSI version {version}!')
    if not check_files(expected_exe_files):
        cmake_args = f'-DBUILD_ENKF=ON -DBUILD_CORELIBS=ON -DUSE_WRF=ON -DBUILD_WRF=ON -DBUILD_GFS=OFF '
        if version == Version('3.6'):
            # Patch known build problems in the 3.6 release tree.
            cli.notice('Fix GSI 3.6!')
            edit_file('../cmake/Modules/FindCORELIBS.cmake', [[
                '\${CMAKE_SOURCE_DIR}/libsrc', '${CMAKE_SOURCE_DIR}/lib/libsrc'
            ]])
            if args.compiler_suite == 'gnu':
                edit_file('../cmake/Modules/setCompilerFlags.cmake', [[
                    'set\(BACIO_Fortran_FLAGS " -O3 -fconvert=big-endian -ffree-form',
                    'set(BACIO_Fortran_FLAGS " -O3 -fconvert=big-endian'
                ]])
            elif args.compiler_suite == 'intel':
                edit_file('../cmake/Modules/setCompilerFlags.cmake', [[
                    'set \(BACIO_Fortran_FLAGS "-O3 -free -assume nocc_omp',
                    'set(BACIO_Fortran_FLAGS " -O3 -assume nocc_omp'
                ]])
            edit_file('../core-libs/sigio/CMakeLists.txt',
                      [['\*\.f\)', '*.f90)']])
            edit_file('../src/hybrid_ensemble_isotropic.F90',
                      [['stop\(123\)', 'stop 123']])
            edit_file('../src/setupoz.f90', [[
                'my_head%ij\(1\),my_head%wij\(1\)\)', 'my_head%ij,my_head%wij)'
            ]])
            cmake_args += f'-DWRFPATH={args.wrf_root}'
        if version == Version('3.7'):
            cli.notice('Fix GSI 3.7!')
            edit_file('../src/setuplight.f90',
                      [['my_head%wij\(1\)\)', 'my_head%wij)']])
            cli.warning(
                'GSI 3.7 has bug when rerun cmake, so clean all build files.')
            run('rm -rf ../build/*')
            cmake_args += '-DBUILD_UTIL_COM=ON'

        # Fix not-found -lnetcdf -lnetcdff.
        edit_file('../cmake/Modules/setCompilerFlags.cmake',
                  [['-lnetcdf -lnetcdff', '']])

        cli.notice('Configure GSI ...')
        if args.verbose:
            run(f'CC={cc} CXX={cxx} FC={fc} cmake .. {cmake_args}')
        else:
            run(f'CC={cc} CXX={cxx} FC={fc} cmake .. {cmake_args} &> cmake.out'
                )

        cli.notice('Compile GSI ...')
        if args.verbose:
            run('make')
        else:
            run('make &> make.out')

        if check_files(expected_exe_files):
            cli.notice('Succeeded.')
        else:
            if args.verbose:
                cli.error('Failed')
            else:
                cli.error(f'Failed! Check {args.gsi_root}/build/make.out')
    else:
        cli.notice('GSI has already been built.')

    if version == Version('3.6'):
        # Build the bufr_tools utilities (only shipped with 3.6).
        os.chdir(f'{args.gsi_root}/util/bufr_tools')
        if args.force: run('make clean')
        expected_exe_files = (
            'bufr_append_sample.exe', 'bufr_decode_radiance.exe',
            'bufr_decode_sample.exe', 'bufr_encode_sample.exe',
            'prepbufr_append_retrieve.exe', 'prepbufr_append_surface.exe',
            'prepbufr_append_upperair.exe', 'prepbufr_decode_all.exe',
            'prepbufr_encode_surface.exe', 'prepbufr_encode_upperair.exe',
            'prepbufr_inventory.exe')
        if not check_files(expected_exe_files):
            # Point the makefile at the build tree and current compiler.
            edit_file('makefile', [['^\s*FC\s*=.*$', f'FC = {fc}'],
                                   ['-I\.\./\.\./dtc', '-I../../build'],
                                   ['-L\.\./\.\./dtc', '-L../../build'],
                                   ['-lbufr_i4r8', '-lbufr_v10.2.5']])

            cli.notice('Compile bufr_tools ...')
            if args.verbose:
                run('make')
            else:
                run('make &> make.out')

            if check_files(expected_exe_files):
                cli.notice('Succeeded.')
            else:
                if args.verbose:
                    cli.error('Failed!')
                else:
                    cli.error(
                        f'Failed! Check {args.gsi_root}/util/bufr_tools/make.out'
                    )
        else:
            cli.notice('GSI bufr_tools has been built.')

        # Build the read_diag analysis utilities.
        os.chdir(f'{args.gsi_root}/util/Analysis_Utilities/read_diag/')
        expected_exe_files = ('read_diag_conv.exe', 'read_diag_conv_ens.exe',
                              'read_diag_rad.exe')
        if not check_files(expected_exe_files):
            edit_file('makefile', [[
                'include \.\./\.\./\.\./dtc/configure.gsi', ''
            ], ['\$\(SFC\)', fc], ['-I\.\./\.\./\.\./dtc', '-I../../../build'],
                                   [
                                       '-L\.\./\.\./\.\./src -lgsi',
                                       '-L../../../build/lib -lgsilib_shrd'
                                   ],
                                   [
                                       'FLAGS= \$\(FFLAGS_DEFAULT\)',
                                       'FLAGS = -fconvert=big-endian'
                                   ]])

            cli.notice('Compile read_diag ...')
            if args.verbose:
                run('make')
            else:
                run('make &> make.out')

            if check_files(expected_exe_files):
                cli.notice('Succeeded.')
            else:
                if args.verbose:
                    cli.error('Failed')
                else:
                    cli.error(
                        f'Failed! Check {args.gsi_root}/util/Analysis_Utilities/read_diag/make.out'
                    )
        else:
            cli.notice('GSI read_diag has been built.')
コード例 #24
0
    help=
    'Use PBS job management system variants (e.g. TORQUE) to run MPI jobs.',
    action='store_true')
# Common command-line flags shared by the run scripts.
parser.add_argument('-v',
                    '--verbose',
                    help='Print out work log',
                    action='store_true')
parser.add_argument('-f', '--force', help='Force to run', action='store_true')
args = parser.parse_args()

# Resolve the work root: command-line option first, then the WORK_ROOT
# environment variable; abort when neither is given.
if not args.work_root:
    if os.getenv('WORK_ROOT'):
        args.work_root = os.getenv('WORK_ROOT')
    else:
        cli.error(
            'Option --work-root or environment variable WORK_ROOT need to be set!'
        )
args.work_root = os.path.abspath(args.work_root)
if not os.path.isdir(args.work_root):
    os.makedirs(args.work_root)
    cli.notice(f'Create work directory {args.work_root}.')

# Resolve the WRF root: option, then WRF_ROOT env var, then <codes>/WRF.
if not args.wrf_root:
    if os.getenv('WRF_ROOT'):
        args.wrf_root = os.getenv('WRF_ROOT')
    elif args.codes:
        args.wrf_root = args.codes + '/WRF'
    else:
        cli.error(
            'Option --wrf-root or environment variable WRF_ROOT need to be set!'
        )
コード例 #25
0
                        help='Root directory of all codes (e.g. WRF, UPP)')
    # Command-line options for the NCEPLIBS build script.
    parser.add_argument('-n',
                        '--nceplibs-root',
                        dest='nceplibs_root',
                        help='NCEPlibs root directory (e.g. NCEPLIBS)')
    parser.add_argument('-s',
                        '--compiler-suite',
                        dest='compiler_suite',
                        help='Compiler suite',
                        choices=['gnu', 'pgi', 'intel'])
    parser.add_argument('-f',
                        '--force',
                        help='Force to rebuild if already built',
                        action='store_true')
    args = parser.parse_args()

    # Resolve the NCEPLIBS root: option, then NCEPLIBS_ROOT env var, then
    # <codes>/NCEPLIBS; abort when none is available.
    if not args.nceplibs_root:
        if os.getenv('NCEPLIBS_ROOT'):
            args.nceplibs_root = os.getenv('NCEPLIBS_ROOT')
        elif args.codes:
            args.nceplibs_root = args.codes + '/NCEPLIBS'
        else:
            cli.error(
                'Option --nceplibs-root or environment variable NCEPLIBS_ROOT need to be set!'
            )
    args.nceplibs_root = os.path.abspath(args.nceplibs_root)
    if not os.path.isdir(args.nceplibs_root):
        cli.error(f'Directory {args.nceplibs_root} does not exist!')

    build_nceplibs(args.nceplibs_root, args)
コード例 #26
0
	# Command-line options for the WRF/WPS build script.
	parser.add_argument(      '--openmp', help='Use OpenMP parallelism.', action='store_true')
	parser.add_argument('-j', '--jobs', help='Set job size to compile.', type=int, default=2)
	parser.add_argument(      '--rttov', help='Use RTTOV for satellite DA.')
	parser.add_argument('-s', '--compiler-suite', dest='compiler_suite', help='Compiler suite', choices=['gnu', 'pgi', 'intel'], required=True)
	parser.add_argument('-f', '--force', help='Force to rebuild if already built', action='store_true')
	parser.add_argument('-d', '--debug', help='Build WRF with debug compile options', action='store_true')
	parser.add_argument('-v', '--verbose', help='Print out build log', action='store_true')
	args = parser.parse_args()

	# Resolve the WRF root: option, then WRF_ROOT env var, then <codes>/WRF.
	if not args.wrf_root:
		if os.getenv('WRF_ROOT'):
			args.wrf_root = os.getenv('WRF_ROOT')
		elif args.codes:
			args.wrf_root = args.codes + '/WRF'
		else:
			cli.error('Option --wrf-root or environment variable WRF_ROOT need to be set!')
	args.wrf_root = os.path.abspath(args.wrf_root)
	if not os.path.isdir(args.wrf_root):
		cli.error(f'Directory {args.wrf_root} does not exist!')

	# Resolve the WPS root the same way.
	if not args.wps_root:
		if os.getenv('WPS_ROOT'):
			args.wps_root = os.getenv('WPS_ROOT')
		elif args.codes:
			args.wps_root = args.codes + '/WPS'
		else:
			cli.error('Option --wps-root or environment variable WPS_ROOT need to be set!')
	args.wps_root = os.path.abspath(args.wps_root)
	if not os.path.isdir(args.wps_root):
		cli.error(f'Directory {args.wps_root} does not exist!')
コード例 #27
0
def run_wrfda_3dvar(work_root,
                    wrfda_root,
                    config,
                    args,
                    wrf_work_dir=None,
                    force=False,
                    tag=None,
                    fg=None):
    """Run WRFDA 3DVar analysis (da_wrfvar.exe) in a per-domain work dir.

    Links the background-error matrix, first guess and observation data
    into the work directory, then submits da_wrfvar.exe. Skips the run
    when a wrfvar_output for the start time already exists (unless
    args.force or force is set).

    Args:
        work_root: Root of the case's working directories.
        wrfda_root: WRFDA installation root.
        config: Parsed configuration dict (namelist sections + 'custom').
        args: Command-line namespace (force, np, littler_root,
            prepbufr_root, ...).
        wrf_work_dir: Override for the WRF work directory; derived from
            work_root and tag when None.
        force: Rerun even if output exists (in addition to args.force).
        tag: Optional suffix distinguishing parallel experiment dirs.
        fg: Explicit first-guess file; autodetected from WRF output when
            None.
    """
    start_time = config['custom']['start_time']
    datetime_fmt = 'YYYY-MM-DD_HH:mm:ss'
    start_time_str = start_time.format(datetime_fmt)
    max_dom = config['domains']['max_dom']

    # Derive the WRF work directory when the caller did not give one.
    if not wrf_work_dir:
        if tag != None:
            wrf_work_dir = f'{work_root}/wrf_{tag}'
        else:
            wrf_work_dir = f'{work_root}/wrf'

    if tag != None:
        obsproc_work_dir = f'{work_root}/wrfda_{tag}/obsproc'
    else:
        obsproc_work_dir = f'{work_root}/wrfda/obsproc'

    # With nested domains each domain gets its own subdirectory; the
    # domain index comes from config (0-based, hence the +1).
    if max_dom > 1:
        dom_str = 'd' + str(config['custom']['wrfda']['dom'] + 1).zfill(2)
        if tag != None:
            wrfda_work_dir = f'{work_root}/wrfda_{tag}/{dom_str}'
        else:
            wrfda_work_dir = f'{work_root}/wrfda/{dom_str}'
    else:
        dom_str = 'd01'
        if tag != None:
            wrfda_work_dir = f'{work_root}/wrfda_{tag}'
        else:
            wrfda_work_dir = f'{work_root}/wrfda'
    if not os.path.isdir(wrfda_work_dir): os.mkdir(wrfda_work_dir)
    os.chdir(wrfda_work_dir)

    cli.stage(f'Run da_wrfvar.exe at {wrfda_work_dir} ...')

    # Skip the whole run when the analysis for this time already exists.
    if os.path.isfile(f'wrfvar_output_{start_time_str}'
                      ) and not args.force and not force:
        run(f'ls -l wrfvar_output_{start_time_str}')
        cli.notice(f'wrfvar_output_{start_time_str} already exist.')
        return

    run(f'ln -sf {wrfda_root}/run/LANDUSE.TBL {wrfda_work_dir}')

    if not os.path.isfile('namelist.input'):
        cli.error(
            'namelist.input has not been generated! Run config_wrfda.py.')

    # BE matrix
    # Link the background-error matrix matching cv_options; fall back to
    # the generic CV3 matrix shipped with WRFDA when none is linked.
    if 'cv_options' in config['wrfvar7']:
        be_work_dir = os.path.dirname(
            os.path.abspath(work_root)) + '/be/' + dom_str
        if not os.path.isdir(be_work_dir):
            # Try one level further up when the sibling 'be' dir is absent.
            be_work_dir = os.path.dirname(
                os.path.abspath(work_root)) + '/../be/' + dom_str

        if config['wrfvar7']['cv_options'] == 5:
            if not os.path.isfile(f'{be_work_dir}/be.dat.cv5'):
                cli.error(
                    f'BE matrix {be_work_dir}/be.dat.cv5 does not exist!')
            run(f'ln -sf {be_work_dir}/be.dat.cv5 be.dat')
        elif config['wrfvar7']['cv_options'] == 6:
            if not os.path.isfile(f'{be_work_dir}/be.dat.cv6'):
                cli.error(
                    f'BE matrix {be_work_dir}/be.dat.cv6 does not exist!')
            run(f'ln -sf {be_work_dir}/be.dat.cv6 be.dat')
        elif config['wrfvar7']['cv_options'] == 7:
            if not os.path.isfile(f'{be_work_dir}/be.dat.cv7'):
                cli.error(
                    f'BE matrix {be_work_dir}/be.dat.cv7 does not exist!')
            run(f'ln -sf {be_work_dir}/be.dat.cv7 be.dat')
    if not os.path.exists('./be.dat'):
        run(f'ln -sf {wrfda_root}/var/run/be.dat.cv3 be.dat')

    # First guess
    # TODO: Assume there is only one domain to be assimilated.
    # Prefer an explicit fg, then WRF forecast output, then wrfinput.
    if fg != None:
        run(f'ln -sf {fg} {wrfda_work_dir}/fg')
    else:
        expected_files = [
            '{}/wrfout_d{:02d}_{}'.format(wrf_work_dir, i + 1, start_time_str)
            for i in range(max_dom)
        ]
        if check_files(expected_files):
            run(f'ln -sf {wrf_work_dir}/wrfout_{dom_str}_{start_time_str} {wrfda_work_dir}/fg'
                )
        else:
            expected_files = [
                '{}/wrfinput_d{:02d}_{}'.format(wrf_work_dir, i + 1,
                                                start_time_str)
                for i in range(max_dom)
            ]
            if not check_files(expected_files):
                cli.error(
                    'real.exe or da_update_bc.exe wasn\'t executed successfully!'
                )
            run(f'ln -sf {wrf_work_dir}/wrfinput_{dom_str}_{start_time_str} {wrfda_work_dir}/fg'
                )

    # Observation data
    # Link the observation input (radar, conventional ASCII/PREPBUFR)
    # matching the configured DA type and ob_format.
    if config['custom']['wrfda']['type'] == '3dvar':
        if 'use_radarobs' in config['wrfvar4'] and config['wrfvar4'][
                'use_radarobs']:
            # Radar data
            run(f'rm -f ob.*')
            for obs_radar_file in glob(
                    f'{args.littler_root}/{start_time.format("YYYYMMDD")}/obs.radar.*'
            ):
                # Radar file names embed the observation time; only link
                # the one matching the analysis start time.
                radar_time = pendulum.from_format(
                    os.path.basename(obs_radar_file).split('.')[2],
                    'YYYYMMDDHHmm')
                if radar_time == start_time:
                    run(f'ln -sf {obs_radar_file} ob.radar')
            if os.path.isfile(f'wrfvar_output_{start_time_str}'):
                cli.notice('Use previous analysis data as the background.')
                run(f'mv wrfvar_output_{start_time_str} wrfvar_output_conv_{start_time_str}'
                    )
                run(f'ln -sf wrfvar_output_conv_{start_time_str} fg')
        elif 'conv_obs' in config['custom']:
            # Conventional observations at a user-configured location;
            # dir/file patterns are Jinja-style templates of obs_time.
            if 'dir_pattern' in config['custom']['conv_obs']:
                obs_dir = Template(
                    config['custom']['conv_obs']['dir_pattern']).render(
                        obs_time=start_time)
            if 'file_pattern' in config['custom']['conv_obs']:
                obs_file = Template(
                    config['custom']['conv_obs']['file_pattern']).render(
                        obs_time=start_time)
            if config['wrfvar3']['ob_format'] == 1:
                run(f'ln -sf {args.prepbufr_root}/{obs_dir}/{obs_file} ob.bufr'
                    )
            elif config['wrfvar3']['ob_format'] == 2:
                run(f'ln -sf {args.prepbufr_root}/{obs_dir}/{obs_file} ob.ascii'
                    )
        elif config['wrfvar3']['ob_format'] == 2 and os.path.isfile(
                f'{obsproc_work_dir}/obs_gts_{start_time.format(datetime_fmt)}.3DVAR'
        ):
            # LITTLE_R conventional data
            run(f'ln -sf {obsproc_work_dir}/obs_gts_{start_time.format(datetime_fmt)}.3DVAR ob.ascii'
                )
        elif config['wrfvar3']['ob_format'] == 1 and config['custom']['wrfda'][
                'prepbufr_source'] == 'gdas':
            # PREPBUFR conventional data
            gdas_file_path = f'{args.prepbufr_root}/gdas.{start_time.format("YYYYMMDD")}/gdas.t{start_time.hour:02}z.prepbufr.nr'
            if not os.path.isfile(gdas_file_path):
                cli.error(f'{gdas_file_path} does not exist!')
            run(f'ln -sf {gdas_file_path} ob.bufr')

    # Re-check the output (the radar branch may have renamed it above).
    if os.path.isfile(f'{wrfda_work_dir}/wrfvar_output_{start_time_str}'
                      ) and not args.force:
        cli.notice(
            f'{wrfda_work_dir}/wrfvar_output_{start_time_str} already exists.')
        return

    # Cap the process count at 20; da_wrfvar.exe does not scale further
    # here — TODO confirm the rationale for this limit.
    submit_job(f'{wrfda_root}/var/build/da_wrfvar.exe',
               min(20, args.np),
               config,
               args,
               wait=True)

    expected_files = [f'wrfvar_output', 'statistics']
    if not check_files(expected_files):
        # Check if the failure is caused by parallel computing? Such as cv_options is zero in some process.
        if search_files('rsl.error.*',
                        'Invalid CV option chosen:  cv_options =    0'):
            cli.warning(
                'Failed to run da_wrfvar.exe in parallel. Try to run in serial.'
            )
            submit_job(f'{wrfda_root}/var/build/da_wrfvar.exe',
                       1,
                       config,
                       args,
                       wait=True)
            if not check_files(expected_files):
                cli.error(
                    f'Still failed! See {wrfda_work_dir}/rsl.error.0000.')
        else:
            cli.error(f'Failed! See {wrfda_work_dir}/rsl.error.0000.')
    else:
        # Show the analysis statistics, plot cost/gradient, and keep a
        # time-stamped copy of the analysis.
        print(open('statistics').read())
        run(f'ncl -Q {scripts_root}/../plots/plot_cost_grad_fn.ncl')
        run(f'cp wrfvar_output wrfvar_output_{start_time_str}')
        cli.notice('Succeeded.')
コード例 #28
0
def build_upp(wrf_root, upp_root, args):
    """Build UPP (Unified Post Processor).

    Args:
        wrf_root: WRF root directory, exported as WRF_DIR when not None.
        upp_root: UPP source directory to configure and compile in.
        args: Command-line namespace; needs upp_root, nceplibs_root,
            compiler_suite and force.
    """
    if wrf_root != None: os.environ['WRF_DIR'] = wrf_root

    # Derive HDF5/NETCDF install prefixes from tools on PATH when the
    # environment variables are not already set.
    if not 'HDF5' in os.environ:
        res = subprocess.run(['which', 'h5dump'], stdout=subprocess.PIPE)
        if res.returncode == 0:
            # h5dump lives in <prefix>/bin, so the prefix is two dirnames up.
            os.environ['HDF5'] = os.path.dirname(
                os.path.dirname(res.stdout.decode('utf-8')))
            cli.notice(f'Set HDF5 to {os.environ["HDF5"]}')
    if not 'HDF5' in os.environ:
        cli.warning('HDF5 environment variable is not set')

    if not 'NETCDF' in os.environ:
        res = subprocess.run(['which', 'ncdump'], stdout=subprocess.PIPE)
        if res.returncode == 0:
            os.environ['NETCDF'] = os.path.dirname(
                os.path.dirname(res.stdout.decode('utf-8')))
            cli.notice(f'Set NETCDF to {os.environ["NETCDF"]}')
    if not 'NETCDF' in os.environ:
        cli.warning('NETCDF environment variable is not set!')

    # JasPer is required for GRIB2 support.
    if not 'JASPERINC' in os.environ or not 'JASPERLIB' in os.environ:
        if 'JASPER_ROOT' in os.environ:
            os.environ['JASPERINC'] = os.environ['JASPER_ROOT'] + '/include'
            os.environ['JASPERLIB'] = os.environ['JASPER_ROOT'] + '/lib'
            cli.notice(f'Set JASPERINC to {os.environ["JASPERINC"]}.')
            cli.notice(f'Set JASPERLIB to {os.environ["JASPERLIB"]}.')
        else:
            cli.error(
                'JASPERINC and JASPERLIB environment variables are not set!')

    version = upp_version(args.upp_root)

    if version < Version('4.1'):
        expected_exe_files = ('bin/copygb.exe', 'bin/ndate.exe',
                              'bin/unipost.exe')
    else:
        # One-element tuple; a bare string would make check_files iterate
        # over single characters.
        expected_exe_files = ('exec/unipost.exe',)
        if not check_files(expected_exe_files):
            # UPP >= 4.1 needs NCEPLIBS; default to a sibling directory
            # of the UPP tree when not given.
            if not args.nceplibs_root:
                args.nceplibs_root = f'{os.path.dirname(args.upp_root)}/NCEPLIBS'
            if not os.path.isdir(args.nceplibs_root):
                cli.error('NCEPLIBS is not ready!')
            # Export only when building; args.nceplibs_root may be None
            # otherwise and os.environ values must be strings.
            os.environ['NCEPLIBS_DIR'] = args.nceplibs_root

    if not check_files(expected_exe_files):
        os.chdir(upp_root)
        if args.force: run('./clean -a &> /dev/null')
        cli.notice('Configure UPP ...')
        # Drive the interactive configure script with the platform choice.
        child = pexpect.spawn('./configure')
        child.expect('Enter selection.*')
        if args.compiler_suite == 'intel':
            child.sendline('4')  # Linux x86_64, Intel compiler (dmpar)
        elif args.compiler_suite == 'gnu':
            child.sendline('8')  # Linux x86_64, gfortran compiler (dmpar)
        elif args.compiler_suite == 'pgi':
            child.sendline(
                '14')  # Linux x86_64, PGI compiler: -f90=pgf90  (dmpar)
        child.wait()

        if args.compiler_suite == 'intel':
            # Switch to the MPI compiler wrappers for Intel.
            edit_file('./configure.upp',
                      [['mpif90', 'mpiifort'], ['mpicc', 'mpiicc']])

        if 'LIBPNG_ROOT' in os.environ:
            # Point the GRIB2 support libs at a custom libpng install.
            edit_file('./configure.upp', [
                ['-lpng', f'-L{os.environ["LIBPNG_ROOT"]}/lib -lpng'],
                [
                    'GRIB2SUPT_INC\s*=\s*(.*)',
                    f'GRIB2SUPT_INC = \\1 -I{os.environ["LIBPNG_ROOT"]}/include'
                ]
            ])

        cli.notice('Compile UPP ...')
        run('./compile &> compile.out')

        if check_files(expected_exe_files):
            cli.notice('Succeeded.')
        else:
            cli.error(f'Failed! Check {upp_root}/compile.out')
    else:
        cli.notice('UPP is already built.')
コード例 #29
0
    help=
    'Use PBS job management system variants (e.g. TORQUE) to run MPI jobs.',
    action='store_true')
# Common command-line flags shared by the run scripts.
parser.add_argument('-v',
                    '--verbose',
                    help='Print out work log',
                    action='store_true')
parser.add_argument('-f', '--force', help='Force to run', action='store_true')
args = parser.parse_args()

# Resolve the work root: command-line option first, then the WORK_ROOT
# environment variable; abort when neither is given.
if not args.work_root:
    if os.getenv('WORK_ROOT'):
        args.work_root = os.getenv('WORK_ROOT')
    else:
        cli.error(
            'Option --work-root or environment variable WORK_ROOT need to be set!'
        )
args.work_root = os.path.abspath(args.work_root)
if not os.path.isdir(args.work_root):
    os.makedirs(args.work_root)
    cli.notice(f'Create work directory {args.work_root}.')

# Resolve the WRF root: option, then WRF_ROOT env var, then <codes>/WRF.
if not args.wrf_root:
    if os.getenv('WRF_ROOT'):
        args.wrf_root = os.getenv('WRF_ROOT')
    elif args.codes:
        args.wrf_root = args.codes + '/WRF'
    else:
        cli.error(
            'Option --wrf-root or environment variable WRF_ROOT need to be set!'
        )
コード例 #30
0
	if res.status_code not in (200, 301):
		print(res.status_code)
		print(f'{root_url}/{prefix}.{start_time.format("YYYYMMDD")}/{start_time.format("HH")}')
		cli.error(f'Remote GFS data at {start_time} do not exist!')

	for forecast_hour in forecast_hours:
		download_gfs(start_time, forecast_hour)

if __name__ == '__main__':
	# Command-line interface of the GFS/GDAS download script.
	parser = argparse.ArgumentParser(description="Run WRF model and its friends.\n\nLongrun Weather Inc., NWP operation software.\nCopyright (C) 2018 - All Rights Reserved.", formatter_class=argparse.RawTextHelpFormatter)
	parser.add_argument('-o', '--output-root', dest='output_root', default='.', help='Root directory to store GFS data.')
	parser.add_argument('-s', '--start-time', dest='start_time', help='Download GFS data start in this date time (YYYYMMDDHH).', type=parse_time)
	parser.add_argument('-f', '--forecast-hours', dest='forecast_hours', help='Download forecast hours (HH-HH+XX).', type=parse_forecast_hours)
	parser.add_argument('-e', '--resolution', help='Set GFS resolution (1p00, 0p50, 0p25).', choices=('1p00', '0p50', '0p25'), default='0p25')
	parser.add_argument('-g', '--gdas', help='Use GDAS analysis.', action='store_true')
	args = parser.parse_args()

	# The prefix selects the remote dataset (and the local subdirectory).
	if args.gdas:
		prefix = 'gdas'
	else:
		prefix = 'gfs'

	# NOTE(review): --output-root defaults to '.', so this branch is
	# unreachable and the RAWDATA_ROOT fallback is dead code — confirm
	# whether the default should be dropped.
	if not args.output_root:
		if os.getenv('RAWDATA_ROOT'):
			args.output_root = os.getenv('RAWDATA_ROOT') + '/' + prefix
		else:
			cli.error('Option --output-root or environment variable RAWDATA_ROOT need to be set!')
	args.output_root = os.path.abspath(args.output_root)

	get_gfs(args.output_root, args.start_time, args.forecast_hours, args.resolution, prefix, args)