Example #1
 def _check_for_new_version(self):
     if hasattr(self, 'install_new_version_btn'):
         self.install_new_version_btn.setVisible(False)
     updater.get_latest_release(
         channel=Config().update_channel,
         branch=Version(global_.APP_VERSION),
         success_callback=I.update_config_tab,
     )
Example #2
def run_wrfplus_ad(work_root, wrfplus_root, config, args):
	start_time = config['custom']['start_time']
	end_time = config['custom']['end_time']
	datetime_fmt = 'YYYY-MM-DD_HH:mm:ss'
	start_time_str = start_time.format(datetime_fmt)
	max_dom = config['domains']['max_dom']

	wrf_work_dir = os.path.abspath(work_root) + '/wrf'
	if not os.path.isdir(wrf_work_dir):
		cli.error(f'WRF work directory {wrf_work_dir} does not exist!')

	wrfplus_work_dir = os.path.abspath(work_root) + '/wrfplus'
	if not os.path.isdir(wrfplus_work_dir):
		cli.error(f'WRFPLUS has not been configured! Run config_wrfplus.py first.')
	os.chdir(wrfplus_work_dir)

	if os.path.isfile(f'{wrf_work_dir}/wrfinput_d01_{start_time_str}'):
		run(f'ln -sf {wrf_work_dir}/wrfinput_d01 .')
	elif os.path.isfile(f'{wrf_work_dir}/wrfout_d01_{start_time_str}'):
		run(f'ln -sf {wrf_work_dir}/wrfout_d01_{start_time_str} wrfinput_d01')
	run(f'ln -sf {wrf_work_dir}/wrfbdy_d01 .')
	if not os.path.isfile('final_sens_d01'):
		cli.error('There is no final_sens_d01 file!')

	version = wrf_version(wrfplus_root)

	cli.stage(f'Run WRFPLUS at {wrfplus_work_dir} ...')
	expected_files = ['wrfout_d{:02d}_{}'.format(i + 1, start_time_str) for i in range(max_dom)]
	expected_files.append(f'init_sens_d01_{start_time_str}')
	if not check_files(expected_files) or args.force:
		run('rm -f wrfout_*')
		run(f'ln -sf {wrfplus_root}/run/LANDUSE.TBL .')
		run(f'ln -sf {wrfplus_root}/run/VEGPARM.TBL .')
		run(f'ln -sf {wrfplus_root}/run/SOILPARM.TBL .')
		run(f'ln -sf {wrfplus_root}/run/GENPARM.TBL .')
		run(f'ln -sf {wrfplus_root}/run/RRTM_DATA_DBL RRTM_DATA')
		run(f'ln -sf {wrfplus_root}/run/ETAMPNEW_DATA_DBL ETAMPNEW_DATA')
		if version >= Version('4.0'):
			cmd = f'{wrfplus_root}/run/wrfplus.exe'
		else:
			cmd = f'{wrfplus_root}/run/wrf.exe'
		retries = 0
		while True:
			submit_job(cmd, args.np, config, args, wait=True)
			if os.path.isfile(f'gradient_wrfplus_d01_{start_time_str}'):
				run(f'mv gradient_wrfplus_d01_{start_time_str} init_sens_d01_{start_time_str}')
			if not check_files(expected_files):
				if retries == 10:
					cli.error(f'Failed! Check output {os.path.abspath(wrfplus_work_dir)}/rsl.error.0000.')
				retries = retries + 1
				cli.warning('Failed to run wrfplus, retry it!')
			else:
				break
		cli.notice('Succeeded.')
	else:
		cli.notice('File wrfout_* already exist.')
	run(f'ls -l {wrfplus_work_dir}/wrfout_*')
Example #3
def add_to_db(wrap: str, versions: T.List[T.Tuple[str, str]], releases: T.Dict[str, T.Dict[str, T.List[str]]]):
    releases.setdefault(wrap, {})
    releases[wrap].setdefault('versions', [])
    releases[wrap].setdefault('dependency_names', [])
    releases[wrap].setdefault('program_names', [])
    versions: T.List[Version] = [Version(f'{version}-{revision}') for version, revision in versions]
    versions = sorted(versions, reverse=True)
    versions: T.List[str] = [v._s for v in versions]
    progs, deps = get_provide(wrap)
    releases[wrap]['versions'] = versions
    releases[wrap]['program_names'] = progs
    releases[wrap]['dependency_names'] = deps
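For reference, a minimal sketch of the releases entry that add_to_db builds for one wrap; the wrap name, versions, and provides below are made up for illustration, not taken from the examples:

# Illustrative shape only (hypothetical wrap name and values); add_to_db sorts the
# '<version>-<revision>' strings newest-first via Version before storing them.
releases = {
    'somewrap': {
        'versions': ['1.2.0-2', '1.2.0-1', '1.1.0-3'],  # newest first
        'dependency_names': ['somewrap_dep'],
        'program_names': ['somewrap_tool'],
    },
}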
Example #4
def config_wps(work_root, wps_root, geog_root, config, args):
    if has_key(config, ('custom', 'start_time')):
        start_time = config['custom']['start_time']
        start_time_str = start_time.format('YYYY-MM-DD_HH:mm:ss')
        if not has_key(config, ('custom', 'end_time')):
            cli.error('custom->end_time does not exist in config file!')
        end_time = config['custom']['end_time']
        end_time_str = end_time.format('YYYY-MM-DD_HH:mm:ss')

    if not has_key(config, ('domains', 'max_dom')):
        cli.error('domains->max_dom does not exist in config file!')
    max_dom = config['domains']['max_dom']

    wps_work_dir = work_root + '/wps'
    if not os.path.isdir(wps_work_dir): os.makedirs(wps_work_dir)
    os.chdir(wps_work_dir)

    version = wrf_version(wps_root)
    if version < Version('3.9.1'):
        cli.error(
            f'WPS {version} may not handle GFS data correctly! Please use WPS >= 3.9.1.'
        )

    cli.notice('Edit namelist.wps for WPS.')
    copy(f'{wps_root}/namelist.wps', 'namelist.wps')
    namelist_wps = f90nml.read('namelist.wps')
    namelist_wps['share']['max_dom'] = max_dom
    if has_key(config, ('custom', 'start_time')):
        namelist_wps['share']['start_date'] = [
            start_time_str for i in range(max_dom)
        ]
    if has_key(config, ('custom', 'end_time')):
        namelist_wps['share']['end_date'] = [
            end_time_str if i == 0 else start_time_str for i in range(max_dom)
        ]
    if has_key(config, ('custom', 'background')) and has_key(
            config, ('custom', 'background', 'interval_seconds')):
        namelist_wps['share']['interval_seconds'] = config['custom'][
            'background']['interval_seconds']
    namelist_wps['geogrid']['geog_data_path'] = geog_root
    for key, value in config['geogrid'].items():
        namelist_wps['geogrid'][key] = value
    namelist_wps['geogrid']['opt_geogrid_tbl_path'] = wps_work_dir
    namelist_wps['metgrid']['opt_metgrid_tbl_path'] = wps_work_dir
    if 'metgrid' in config:
        for key, value in config['metgrid'].items():
            namelist_wps['metgrid'][key] = value
    namelist_wps.write('./namelist.wps', force=True)
    run(f'ncl -Q {script_root}/../plots/plot_domains.ncl')
    cli.notice(f'Check {wps_work_dir}/wps_show_dom.pdf for domains.')

    cli.notice('Succeeded.')
Example #5
def add_to_db(wrap, versions, releases):
    releases.setdefault(wrap, {})
    releases[wrap].setdefault('versions', [])
    releases[wrap].setdefault('dependency_names', [])
    releases[wrap].setdefault('program_names', [])
    versions = [
        Version(f'{version}-{revision}') for version, revision in versions
    ]
    versions = sorted(versions, reverse=True)
    versions = [v._s for v in versions]
    progs, deps = get_provide(wrap)
    releases[wrap]['versions'] = versions
    releases[wrap]['program_names'] = progs
    releases[wrap]['dependency_names'] = deps
Example #6
 def update_config_tab(self, latest_release: AVRelease):
     self.remote_version.set_text_color('black')
     for x in ['stable', 'beta', 'alpha']:
         dcs_install = getattr(dcs_installs, x)
         if dcs_install:
             getattr(self, '{}_install'.format(x)).setText(dcs_install.install_path)
             getattr(self, '{}_variant'.format(x)).setText(dcs_install.saved_games)
             getattr(self, '{}_version'.format(x)).setText(dcs_install.version)
         else:
             getattr(self, '{}_install'.format(x)).setText('not found')
     self.update_channel_combo.set_index_from_text(Config().update_channel)
     if latest_release:
         app_version = Version(global_.APP_VERSION)
         self.latest_release = latest_release
         self.remote_version.setText(latest_release.version.version_str)
         if app_version < self.latest_release.version:
             self.remote_version.set_text_color('green')
         if app_version != self.latest_release.version:
             self.install_new_version_btn.setVisible(True)
Example #7
    def __init__(self,
                 server_address,
                 version,
                 socket_type,
                 HandlerClass,
                 active=False):
        """
        :param server_address: tuple of host address and port
        :param version: IP version (4 or 6)
        :param socket_type: Layer 4 protocol (TCP or UDP)
        :param HandlerClass: Class to handle, send and receive data
        :param active: Boolean to start server
        """

        self.server_address = server_address
        self._version = version

        try:
            validate_address(self.server_address[0], self.server_address[1],
                             version)
        except (ValueError, TypeError):
            raise

        self.__shutdown_flag = False
        self.__continue_flag = True
        self.HandlerClass = HandlerClass

        self.address_family = getattr(socket, Version(version).name)
        self.socket_type = socket_type
        self.socket = socket.socket(self.address_family, self.socket_type)

        if active:
            try:
                self.start()
            except:
                self.stop()
                raise
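The getattr(socket, Version(version).name) lookup above only works if Version maps the IP version number (4 or 6) to a member whose name matches a socket address-family constant. A minimal sketch of such an enum, assumed here rather than taken from the project:

import enum
import socket

class VersionSketch(enum.Enum):
    # Hypothetical stand-in for the Version class used above: member names
    # mirror the socket module's address-family constants.
    AF_INET = 4
    AF_INET6 = 6

# VersionSketch(4).name == 'AF_INET', so the getattr call resolves to socket.AF_INET.
assert getattr(socket, VersionSketch(4).name) is socket.AF_INET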
Example #8
 def version(self):
     return str(Version(self._version))
Example #9
#!/usr/bin/env python3

import argparse
import os
import sys
sys.path.append(f'{os.path.dirname(os.path.realpath(__file__))}/../utils')
from utils import Version, run, cli, check_files

parser = argparse.ArgumentParser(description='Get UPP.', formatter_class=argparse.RawTextHelpFormatter)
parser.add_argument('--upp-version', dest='upp_version')
args = parser.parse_args()

args.upp_version = Version(args.upp_version)

cli.notice(f'Get UPP {args.upp_version} ...')

if args.upp_version >= Version('4.1'):
	run(f'wget -c https://github.com/NCAR/NCEPlibs/archive/upp_v{args.upp_version}_release.tar.gz')
	run(f'wget -c https://dtcenter.org/dfiles/code/upp/DTC_upp_v{args.upp_version}.tar.gz')
else:
	run(f'wget -c https://dtcenter.org/sites/default/files/code/DTC_upp_v{args.upp_version}.tar.gz')
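The download logic above assumes that utils.Version orders dotted version strings numerically and interpolates back to the plain version text inside the f-strings. A minimal usage sketch under those assumptions (run with the scripts' utils package on sys.path, as arranged above):

from utils import Version

# Assumed semantics, inferred from how the script uses Version; not a definitive API.
assert Version('4.1') > Version('3.9.1')   # ordering drives the download branch
assert f"{Version('4.1')}" == '4.1'        # interpolation yields the plain version text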
Example #10
def build_upp(wrf_root, upp_root, args):
    if wrf_root != None: os.environ['WRF_DIR'] = wrf_root

    if not 'HDF5' in os.environ:
        res = subprocess.run(['which', 'h5dump'], stdout=subprocess.PIPE)
        if res.returncode == 0:
            os.environ['HDF5'] = os.path.dirname(
                os.path.dirname(res.stdout.decode('utf-8')))
            cli.notice(f'Set HDF5 to {os.environ["HDF5"]}')
    if not 'HDF5' in os.environ:
        cli.warning('HDF5 environment variable is not set')

    if not 'NETCDF' in os.environ:
        res = subprocess.run(['which', 'ncdump'], stdout=subprocess.PIPE)
        if res.returncode == 0:
            os.environ['NETCDF'] = os.path.dirname(
                os.path.dirname(res.stdout.decode('utf-8')))
            cli.notice(f'Set NETCDF to {os.environ["NETCDF"]}')
    if not 'NETCDF' in os.environ:
        cli.warning('NETCDF environment variable is not set!')

    if not 'JASPERINC' in os.environ or not 'JASPERLIB' in os.environ:
        if 'JASPER_ROOT' in os.environ:
            os.environ['JASPERINC'] = os.environ['JASPER_ROOT'] + '/include'
            os.environ['JASPERLIB'] = os.environ['JASPER_ROOT'] + '/lib'
            cli.notice(f'Set JASPERINC to {os.environ["JASPERINC"]}.')
            cli.notice(f'Set JASPERLIB to {os.environ["JASPERLIB"]}.')
        else:
            cli.error(
                'JASPERINC and JASPERLIB environment variables are not set!')

    version = upp_version(args.upp_root)

    if version < Version('4.1'):
        expected_exe_files = ('bin/copygb.exe', 'bin/ndate.exe',
                              'bin/unipost.exe')
    else:
        expected_exe_files = ('exec/unipost.exe',)
        if not check_files(expected_exe_files):
            if not args.nceplibs_root:
                args.nceplibs_root = f'{os.path.dirname(args.upp_root)}/NCEPLIBS'
            if not os.path.isdir(args.nceplibs_root):
                cli.error('NCEPLIBS is not ready!')
        os.environ['NCEPLIBS_DIR'] = args.nceplibs_root

    if not check_files(expected_exe_files):
        os.chdir(upp_root)
        if args.force: run('./clean -a &> /dev/null')
        cli.notice('Configure UPP ...')
        child = pexpect.spawn('./configure')
        child.expect('Enter selection.*')
        if args.compiler_suite == 'intel':
            child.sendline('4')  # Linux x86_64, Intel compiler (dmpar)
        elif args.compiler_suite == 'gnu':
            child.sendline('8')  # Linux x86_64, gfortran compiler (dmpar)
        elif args.compiler_suite == 'pgi':
            child.sendline(
                '14')  # Linux x86_64, PGI compiler: -f90=pgf90  (dmpar)
        child.wait()

        if args.compiler_suite == 'intel':
            edit_file('./configure.upp',
                      [['mpif90', 'mpiifort'], ['mpicc', 'mpiicc']])

        if 'LIBPNG_ROOT' in os.environ:
            edit_file('./configure.upp', [
                ['-lpng', f'-L{os.environ["LIBPNG_ROOT"]}/lib -lpng'],
                [
                    'GRIB2SUPT_INC\s*=\s*(.*)',
                    f'GRIB2SUPT_INC = \\1 -I{os.environ["LIBPNG_ROOT"]}/include'
                ]
            ])

        cli.notice('Compile UPP ...')
        run('./compile &> compile.out')

        if check_files(expected_exe_files):
            cli.notice('Succeeded.')
        else:
            cli.error(f'Failed! Check {upp_root}/compile.out')
    else:
        cli.notice('UPP is already built.')
Example #11
    if not args.upp_root:
        if os.getenv('UPP_ROOT'):
            args.upp_root = os.getenv('UPP_ROOT')
        elif args.codes:
            args.upp_root = args.codes + '/UPP'
        else:
            cli.error(
                'Option --upp-root or environment variable UPP_ROOT need to be set!'
            )
    args.upp_root = os.path.abspath(args.upp_root)
    if not os.path.isdir(args.upp_root):
        cli.error(f'Directory {args.upp_root} does not exist!')

    version = upp_version(args.upp_root)

    if version < Version('4.0'):
        if not args.wrf_root:
            if os.getenv('WRF_ROOT'):
                args.wrf_root = os.getenv('WRF_ROOT')
            elif args.codes:
                args.wrf_root = args.codes + '/WRF'
            else:
                cli.error(
                    'Option --wrf-root or environment variable WRF_ROOT need to be set!'
                )
        args.wrf_root = os.path.abspath(args.wrf_root)
        if not os.path.isdir(args.wrf_root):
            cli.error(f'Directory {args.wrf_root} does not exist!')

    build_upp(args.wrf_root, args.upp_root, args)
Example #12
def config_wrf(work_root, wrf_root, wrfda_root, config, args, tag=None):
    phys_config = config['physics'] if 'physics' in config else {}

    start_time = config['custom']['start_time']
    end_time = config['custom']['end_time']
    max_dom = config['domains']['max_dom']

    start_time_str = start_time.format('YYYY-MM-DD_HH:mm:ss')
    end_time_str = end_time.format('YYYY-MM-DD_HH:mm:ss')

    if tag != None:
        wrf_work_dir = f'{work_root}/wrf_{tag}'
    else:
        wrf_work_dir = f'{work_root}/wrf'
    if not os.path.isdir(wrf_work_dir): os.mkdir(wrf_work_dir)
    os.chdir(wrf_work_dir)

    version = wrf_version(wrf_root)

    if os.path.isfile(f'{wrf_work_dir}/wrfinput_d01_{start_time_str}'):
        num_land_cat = get_num_land_cat(
            f'{wrf_work_dir}/wrfinput_d01_{start_time_str}')
    elif os.path.isfile(f'{wrf_work_dir}/wrfinput_d01'):
        num_land_cat = get_num_land_cat(f'{wrf_work_dir}/wrfinput_d01')
    elif os.path.isfile(f'{wrf_work_dir}/wrfout_d01_{start_time_str}'):
        num_land_cat = get_num_land_cat(
            f'{wrf_work_dir}/wrfout_d01_{start_time_str}')
    else:
        cli.warning(
            f'Cannot get num_land_cat parameter from an existing wrfinput_d01 file in {wrf_work_dir}!'
        )
        num_land_cat = None

    cli.notice('Edit namelist.input for WRF.')
    copy(f'{wrf_root}/run/namelist.input', 'namelist.input')
    namelist_input = f90nml.read('namelist.input')
    namelist_input['time_control']['run_hours'] = config['custom'][
        'forecast_hours']
    namelist_input['time_control']['start_year'] = [
        int(start_time.format("Y")) for i in range(max_dom)
    ]
    namelist_input['time_control']['start_month'] = [
        int(start_time.format("M")) for i in range(max_dom)
    ]
    namelist_input['time_control']['start_day'] = [
        int(start_time.format("D")) for i in range(max_dom)
    ]
    namelist_input['time_control']['start_hour'] = [
        int(start_time.format("H")) for i in range(max_dom)
    ]
    namelist_input['time_control']['end_year'] = [
        int(end_time.format("Y")) for i in range(max_dom)
    ]
    namelist_input['time_control']['end_month'] = [
        int(end_time.format("M")) for i in range(max_dom)
    ]
    namelist_input['time_control']['end_day'] = [
        int(end_time.format("D")) for i in range(max_dom)
    ]
    namelist_input['time_control']['end_hour'] = [
        int(end_time.format("H")) for i in range(max_dom)
    ]
    namelist_input['time_control']['frames_per_outfile'] = [
        1 for i in range(max_dom)
    ]
    if 'background' in config['custom'] and 'interval_seconds' in config[
            'custom']['background']:
        namelist_input['time_control']['interval_seconds'] = config['custom'][
            'background']['interval_seconds']
    if 'time_control' in config:
        for key, value in config['time_control'].items():
            namelist_input['time_control'][key] = value
    for key, value in config['domains'].items():
        namelist_input['domains'][key] = value
    if 'physics_suite' in namelist_input['physics']:
        del namelist_input['physics']['physics_suite']
    for key, value in phys_config.items():
        namelist_input['physics'][key] = value
    if num_land_cat != None:
        namelist_input['physics']['num_land_cat'] = num_land_cat
    if 'dynamics' in config:
        for key, value in config['dynamics'].items():
            namelist_input['dynamics'][key] = value
    if version == Version('3.9.1'):
        namelist_input['dynamics']['gwd_opt'] = 0
    namelist_input.write('./namelist.input', force=True)

    cli.notice('Succeeded.')
Example #13
from utils import Version

# Base URL
app_base = "/tango/rest"

# Server version
version = Version([0, 1, 2, "alpha"])

# No of worker processes
workers = 4

# Sanic debug mode
debug = True

# Host to bind
host = "0.0.0.0"

# Server port
port = 8000

# rc3 API mode, can be one of these:
# 	strict = follow the documentation
# 	implementation = follow existing mTango rc3 implementation
# The strict mode doesn't disable endpoints not supported in original mTango
rc3_mode = "strict"

# Proxy cache size (if set to None, cache grows indefinitely and nothing is removed)
cache_size = 64
Example #14
if not os.path.isdir(args.geog_root):
    cli.error(f'Directory {args.geog_root} does not exist!')

if not args.bkg_root:
    if os.getenv('BKG_ROOT'):
        args.bkg_root = os.getenv('BKG_ROOT')
    else:
        cli.error(
            'Option --bkg-root or environment variable BKG_ROOT need to be set!'
        )
args.bkg_root = os.path.abspath(args.bkg_root)
if not os.path.isdir(args.bkg_root):
    cli.error(f'Directory {args.bkg_root} does not exist!')

version = wrf_version(args.wrf_root)
if version >= Version('4.0'):
    cli.error('WRFPLUS 4.0 does not pass tangent and adjoint tests!')
if not version in (Version('3.6.1'), Version('3.8.1'), Version('3.9.1')):
    cli.error(
        'Only WRF 3.6.1, 3.8.1 and 3.9.1 have been tested for FSO application!'
    )

config = parse_config(args.config_json)

if config['wrfvar3']['ob_format'] == 1:
    if not args.prepbufr_root:
        if os.getenv('PREPBUFR_ROOT'):
            args.prepbufr_root = os.getenv('PREPBUFR_ROOT')
        else:
            cli.error(
                'Option --prepbufr-root or environment variable PREPBUFR_ROOT need to be set!'
Example #15
import argparse
import os
import sys
sys.path.append(f'{os.path.dirname(os.path.realpath(__file__))}/../utils')
from utils import Version, run, cli, check_files

parser = argparse.ArgumentParser(description='Get WRF and its friends.',
                                 formatter_class=argparse.RawTextHelpFormatter)
parser.add_argument('--wrf-version', dest='wrf_version')
parser.add_argument('--wps-version', dest='wps_version')
args = parser.parse_args()

if args.wrf_version:
    cli.notice(f'Get WRF {args.wrf_version} ...')
    wrf_version = Version(args.wrf_version)
    if wrf_version >= Version('4.0'):
        run(f'wget -c https://github.com/wrf-model/WRF/archive/v{wrf_version}.tar.gz -O wrf-{wrf_version}.tar.gz'
            )
    else:
        run(f'wget -c https://github.com/wrf-model/WRF/archive/V{wrf_version}.tar.gz -O wrf-{wrf_version}.tar.gz'
            )
        run(f'wget -c https://www2.mmm.ucar.edu/wrf/src/WRFDA_V{wrf_version}.tar.gz'
            )
        if wrf_version == Version('3.6.1'):
            run('wget -c https://www2.mmm.ucar.edu/wrf/users/wrfda/download/WRFPLUS_V3.6.1_r7511.tar.gz'
                )
        else:
            run(f'wget -c https://www2.mmm.ucar.edu/wrf/src/WRFPLUS_V{wrf_version}.tar.gz'
                )
if args.wps_version:
Example #16
    def test_releases(self):
        # Take list of git tags
        stdout = subprocess.check_output(['git', 'tag'])
        tags = [t.strip() for t in stdout.decode().splitlines()]

        with open('releases.json', 'r') as f:
            releases = json.load(f)

        # All tags must be in the releases file
        for t in tags:
            name, version = t.rsplit('_', 1)
            self.assertIn(name, releases)
            self.assertIn(version, releases[name]['versions'])

        # Verify keys are sorted
        self.assertEqual(sorted(releases.keys()), list(releases.keys()))

        # Get the list of wraps that have modified packagefiles
        with open(Path.home() / 'files.json', 'r') as f:
            changed_files = json.load(f)
        self.changed_wraps = set()
        for f in changed_files:
            if f.startswith('subprojects/packagefiles'):
                self.changed_wraps.add(f.split('/')[2])

        for name, info in releases.items():
            # Make sure we can load wrap file
            config = configparser.ConfigParser()
            config.read(f'subprojects/{name}.wrap')
            self.assertEqual(config.sections()[0], 'wrap-file')
            wrap_section = config['wrap-file']
            self.check_has_no_path_separators(wrap_section['directory'])
            self.check_has_no_path_separators(wrap_section['source_filename'])

            # Basic checks
            self.assertTrue(re.fullmatch('[a-z][a-z0-9._-]*', name))
            patch_directory = wrap_section.get('patch_directory')
            if patch_directory:
                patch_path = Path('subprojects', 'packagefiles', patch_directory)
                self.assertTrue(patch_path.is_dir())
                # FIXME: Not all wraps currently comply, only check for wraps we modify.
                if name in self.changed_wraps:
                    self.assertTrue(Path(patch_path, 'LICENSE.build').is_file())
                    self.check_files(patch_path)

            # Make sure it has the same deps/progs provided
            progs = []
            deps = []
            if 'provide' in config.sections():
                provide = config['provide']
                progs = [i.strip() for i in provide.get('program_names', '').split(',')]
                deps = [i.strip() for i in provide.get('dependency_names', '').split(',')]
                for k in provide:
                    if k not in {'dependency_names', 'program_names'}:
                        deps.append(k.strip())
            progs = [i for i in progs if i]
            deps = [i for i in deps if i]
            self.assertEqual(sorted(progs), sorted(info['program_names']))
            self.assertEqual(sorted(deps), sorted(info['dependency_names']))

            # Verify versions are sorted
            versions = info['versions']
            self.assertGreater(len(versions), 0)
            versions_obj = [Version(v) for v in versions]
            self.assertEqual(sorted(versions_obj, reverse=True), versions_obj)

            # The first version could be a new release, all others must have
            # a corresponding tag already.
            for i, v in enumerate(versions):
                t = f'{name}_{v}'
                ver, rev = v.rsplit('-', 1)
                self.assertTrue(re.fullmatch('[a-z0-9._]+', ver))
                self.assertTrue(re.fullmatch('[0-9]+', rev))
                if i == 0:
                    self.check_source_url(name, wrap_section, ver)
                if i == 0 and t not in tags:
                    self.check_new_release(name, info, wrap_section)
                else:
                    self.assertIn(t, tags)
Example #17
def setup_request():
    g.user = None
    g.group = None

    if is_request_secure(
    ) and request.environ.get('wsgi.url_scheme') != 'https':
        request.environ['wsgi.url_scheme'] = 'https'

    if request.path.startswith('/api/swagger') or request.path.startswith(
            '/admin'):
        agent.ignore_transaction()

    api_key = request.headers.get('X-Wigo-API-Key')
    if not api_key and 'key' in request.args:
        request.args = request.args.copy()
        api_key = request.args.pop('key')

    if api_key:
        g.api_key = api_key

    api_version = request.headers.get('X-Wigo-API-Version')
    if not api_version:
        api_version = '1000000000.0.0'

    try:
        g.api_version = Version(api_version)
    except:
        raise ValidationException('Invalid version number',
                                  'X-Wigo-API-Version')

    # check api key auth
    if request.path.startswith('/api/hooks/'):
        # webhooks do their own auth
        pass
    elif request.path.startswith('/api/swagger'):
        pass
    elif request.path.startswith('/api') and api_key != app.config['API_KEY']:
        abort(403, message='Bad API key')

    # resolve by lat/long
    geolocation = request.headers.get('Geolocation')
    if geolocation:
        parsed_geo = urlparse(geolocation)
        if parsed_geo.scheme == 'geo':
            lat, lon = parsed_geo.path.split(',')
            lat, lon = float(lat), float(lon)
            if lat and lon:
                g.latitude, g.longitude = float(lat), float(lon)
                try:
                    g.group = Group.find(lat=g.latitude, lon=g.longitude)
                except DoesNotExist:
                    logger.info(
                        'could not resolve group from geo, lat={}, lon={}'.
                        format(g.latitude, g.longitude))

    city_id = request.headers.get('X-Wigo-City-ID')
    if city_id:
        g.group = Group.find(city_id=int(city_id))

    group_id = request.headers.get('X-Wigo-Group-ID')
    if group_id:
        g.group = Group.find(int(group_id))

    # setup the user after the geo lookup, since the user might need to update its group
    setup_user_by_token()
Example #18
def build_wrf(wrf_root, wps_root, wrfplus_root, wrfda_root, args):
	if not 'HDF5' in os.environ:
		res = subprocess.run(['which', 'h5dump'], stdout=subprocess.PIPE)
		if res.returncode == 0:
			os.environ['HDF5'] = os.path.dirname(os.path.dirname(res.stdout.decode('utf-8')))
			cli.notice(f'Set HDF5 to {os.environ["HDF5"]}')
	if not 'HDF5' in os.environ:
		cli.warning('HDF5 environment variable is not set')

	if not 'NETCDF' in os.environ:
		res = subprocess.run(['which', 'nf-config'], stdout=subprocess.PIPE)
		if res.returncode == 0:
			os.environ['NETCDF'] = os.path.dirname(os.path.dirname(res.stdout.decode('utf-8')))
			res = subprocess.run(['nf-config', '--includedir'], stdout=subprocess.PIPE)
			os.environ['NETCDF_INC'] = res.stdout.decode('utf-8').strip()
			res = subprocess.run(['nf-config', '--flibs'], stdout=subprocess.PIPE)
			os.environ['NETCDF_LIB'] = re.search(r'-L([^ ]*)', res.stdout.decode('utf-8'))[1]
			cli.notice(f'Set NETCDF_INC to {os.environ["NETCDF_INC"]}')
			cli.notice(f'Set NETCDF_LIB to {os.environ["NETCDF_LIB"]}')
	if not 'NETCDF' in os.environ:
		cli.warning('NETCDF environment variable is not set!')

	if not 'JASPERINC' in os.environ or not 'JASPERLIB' in os.environ:
		if 'JASPER_ROOT' in os.environ:
			os.environ['JASPERINC'] = os.environ['JASPER_ROOT'] + '/include'
			os.environ['JASPERLIB'] = os.environ['JASPER_ROOT'] + '/lib'
			cli.notice(f'Set JASPERINC to {os.environ["JASPERINC"]}.')
			cli.notice(f'Set JASPERLIB to {os.environ["JASPERLIB"]}.')
		else:
			cli.error('JASPERINC and JASPERLIB environment variables are not set!')

	if not 'LIBPNG_ROOT' in os.environ:
		cli.warning('LIBPNG_ROOT environment variable is not set. Library PNG may not be found!')

	if not 'WRFIO_NCD_LARGE_FILE_SUPPORT' in os.environ:
		os.environ['WRFIO_NCD_LARGE_FILE_SUPPORT'] = '1'
		cli.notice('Set WRFIO_NCD_LARGE_FILE_SUPPORT to 1.')

	if args.rttov:
		os.environ['RTTOV'] = args.rttov
		cli.notice(f'Use RTTOV in {args.rttov}.')

	# ---------------------------------------------------------------------------------
	#                                    WRF
	os.chdir(wrf_root)
	version = wrf_version(wrf_root)
	if version <= Version('3.6.1'):
		os.environ['BUFR'] = '1'
	# Fix possible code bugs.
	if Version('3.6.1') <= version <= Version('3.8.1'):
		edit_file('phys/module_cu_g3.F', [['integer,  dimension \(12\) :: seed', 'integer,  dimension (33) :: seed']])
	if args.force: run('./clean -a 1> /dev/null 2>&1')
	expected_exe_files = ('main/wrf.exe', 'main/real.exe', 'main/ndown.exe', 'main/tc.exe')
	if not check_files(expected_exe_files):
		cli.notice('Configure WRF ...')
		if args.use_grib:
			cli.notice('Set GRIB2 flag.')
			edit_file('./arch/Config.pl', [
				['\$I_really_want_to_output_grib2_from_WRF = "FALSE"', '$I_really_want_to_output_grib2_from_WRF = "TRUE"']
			])
		if args.use_hyb:
			child = pexpect.spawn('./configure -hyb', encoding='utf-8')
		else:
			child = pexpect.spawn('./configure', encoding='utf-8')
		child.expect('Enter selection.*')
		if platform.system() == 'Darwin':
			if args.compiler_suite == 'gnu':
				child.sendline('15')
		else:
			if args.compiler_suite == 'intel':
				if args.openmp:
					child.sendline('16') # INTEL (ifort/icc) dm+sm
				else:
					child.sendline('15') # INTEL (ifort/icc) dmpar
			elif args.compiler_suite == 'gnu':
				if args.openmp:
					child.sendline('35') # GNU (gfortran/gcc) dm+sm
				else:
					child.sendline('34') # GNU (gfortran/gcc) dmpar
			elif args.compiler_suite == 'pgi':
				if args.openmp:
					child.sendline('55') # PGI (pgf90/pgcc) dm+sm
				else:
					child.sendline('54') # PGI (pgf90/pgcc) dmpar
		child.expect('Compile for nesting.*:')
		child.sendline('1')
		if platform.system() == 'Darwin': child.expect('This build of WRF will use NETCDF4 with HDF5 compression')
		child.wait()

		if args.compiler_suite == 'intel':
			edit_file('./configure.wrf', [
				['mpif90', 'mpiifort'],
				['mpicc', 'mpiicc']
			])
		elif args.compiler_suite == 'pgi':
			edit_file('./configure.wrf', [
				['pgf90', 'pgfortran'],
				['mpif90', 'mpifort']
			])

		# Fix for OpenMPI.
		edit_file('./configure.wrf', [
			['DM_CC\s*=\s*mpicc\s*$', 'DM_CC = mpicc -DMPI2_SUPPORT\n']
		])

		cli.notice('Compile WRF ...')
		if args.debug:
			if args.compiler_suite == 'intel':
				debug_options = '-O0 -g -traceback'
			elif args.compiler_suite == 'gnu':
				debug_options = '-O0 -g -fbacktrace'
			edit_file('configure.wrf', [
				['FCFLAGS\s*=\s*\$\(FCOPTIM\)\s*\$\(FCBASEOPTS\)', f'FCFLAGS = {debug_options} $(FCBASEOPTS)']
			])
		if args.verbose:
			run(f'./compile em_real')
		else:
			run(f'./compile em_real 1> compile.out 2>&1')
		
		if check_files(expected_exe_files):
			cli.notice('Succeeded.')
		else:
			if args.verbose:
				cli.error('Failed!')
			else:
				cli.error(f'Failed! Check {wrf_root}/compile.out')
	else:
		cli.notice('WRF is already built.')

	# ---------------------------------------------------------------------------------
	#                                    WPS
	os.chdir(wps_root)
	if args.force: run('./clean -a 1> /dev/null 2>&1')
	expected_exe_files = ('geogrid/src/geogrid.exe', 'metgrid/src/metgrid.exe', 'ungrib/src/ungrib.exe')
	if not check_files(expected_exe_files):
		cli.notice('Configure WPS ...')
		child = pexpect.spawn('./configure')
		child.expect('Enter selection.*')
		if args.compiler_suite == 'intel':
			child.sendline('19') # Linux x86_64, Intel compiler    (dmpar)
		elif args.compiler_suite == 'gnu':
			child.sendline('3')  # Linux x86_64, gfortran    (dmpar)
		elif args.compiler_suite == 'pgi':
			child.sendline('7')
		child.wait()

		if args.compiler_suite == 'intel':
			edit_file('./configure.wps', [
				['mpif90', 'mpiifort'],
				['mpicc', 'mpiicc']
			])
		elif args.compiler_suite == 'pgi':
			edit_file('./configure.wps', [
				['pgf90', 'pgfortran'],
				['mpif90', 'mpifort']
			])
		else:
			run('sed -i "s/mpicc -cc=.*/mpicc/" configure.wps')
			run('sed -i "s/mpif90 -f90=.*/mpif90/" configure.wps')

		run('sed -i "s/WRF_DIR\s*=.*/WRF_DIR = ..\/WRF/" configure.wps')
		if 'LIBPNG_ROOT' in os.environ:
			run(f'sed -i "s@COMPRESSION_LIBS\s*=\(.*\)@COMPRESSION_LIBS = \\1 -L{os.environ["LIBPNG_ROOT"]}/lib@" configure.wps')
			run(f'sed -i "s@COMPRESSION_INC\s*=\(.*\)@COMPRESSION_INC = \\1 -I{os.environ["LIBPNG_ROOT"]}/include@" configure.wps')

		if args.compiler_suite == 'gnu':
			# Fix for gfortran 9.1.0.
			edit_file('ungrib/src/ngl/g2/intmath.f', [['iand\(i,i-1\)/=0', 'iand(i,i-1_8)/=0']], return_on_first_match=True)
			edit_file('ungrib/src/ngl/g2/intmath.f', [['iand\(i,i-1\)/=0', 'iand(i,i-1_4)/=0']], return_on_first_match=True)
			edit_file('ungrib/src/ngl/g2/intmath.f', [['iand\(i,i-1\)/=0', 'iand(i,i-1_2)/=0']], return_on_first_match=True)
			edit_file('ungrib/src/ngl/g2/intmath.f', [['iand\(i,i-1\)/=0', 'iand(i,i-1_1)/=0']], return_on_first_match=True)

		# Fix for OpenMPI.
		edit_file('./configure.wps', [
			['DM_CC\s*=\s*mpicc\s*$', 'DM_CC = mpicc -DMPI2_SUPPORT\n']
		])

		cli.notice('Compile WPS ...')
		if args.verbose:
			run('./compile')
		else:
			run('./compile 1> compile.out 2>&1')

		if check_files(expected_exe_files):
			cli.notice('Succeeded.')
		else:
			if args.verbose:
				cli.error('Failed!')
			else:
				cli.error(f'Failed! Check {wps_root}/compile.out')
	else:
		cli.notice('WPS is already built.')

	# ---------------------------------------------------------------------------------
	#                                    WRFPLUS
	os.chdir(wrfplus_root)
	if args.force: run('./clean -a 1> /dev/null 2>&1')
	if Version('3.6.1') <= version <= Version('3.9.1'):
		edit_file('phys/module_cu_g3.F', [['integer,  dimension \(12\) :: seed', 'integer,  dimension (33) :: seed']])
		if version == Version('3.6.1'):
			line_number = 841
		elif version == Version('3.8.1'):
			line_number = 855
		elif version == Version('3.9.1'):
			line_number = 856
		else:
			cli.error('Find out the wrong OpenMP directive in WRFPLUS/main/module_wrf_top.F!')
		edit_file('main/module_wrf_top.F', [[line_number, '   !$OMP DEFAULT (SHARED) PRIVATE ( ij )\n']])
	if version >= Version('4.0'):
		expected_exe_files = ('main/wrfplus.exe',)
	else:
		expected_exe_files = ('main/wrf.exe',)
	if not check_files(expected_exe_files):
		cli.notice('Configure WRFPLUS ...')
		if args.use_grib:
			cli.notice('Set GRIB2 flag.')
			edit_file('./arch/Config.pl', [
				['\$I_really_want_to_output_grib2_from_WRF = "FALSE"', '$I_really_want_to_output_grib2_from_WRF = "TRUE"']
			])
		child = pexpect.spawn('./configure wrfplus')
		child.expect('Enter selection.*')
		if args.compiler_suite == 'intel':
			if version <= Version('3.6.1'):
				child.sendline('8')
			else:
				child.sendline('34')
		elif args.compiler_suite == 'gnu':
			child.sendline('18')
		elif args.compiler_suite == 'pgi':
			child.sendline('28')
		child.wait()

		if args.compiler_suite == 'intel':
			edit_file('./configure.wrf', [
				['mpif90', 'mpiifort'],
				['mpicc', 'mpiicc'],
				['override-limits', 'qoverride-limits']
			])

		# Fix for OpenMPI.
		edit_file('./configure.wrf', [
			['DM_CC\s*=\s*mpicc\s*$', 'DM_CC = mpicc -DMPI2_SUPPORT\n']
		])

		cli.notice('Compile WRFPLUS ...')
		if args.debug:
			if args.compiler_suite == 'intel':
				debug_options = '-O0 -g -traceback'
			elif args.compiler_suite == 'gnu':
				debug_options = '-O0 -g -fbacktrace'
			edit_file('configure.wrf', [
				['FCFLAGS\s*=\s*\$\(FCOPTIM\)\s*\$\(FCBASEOPTS\)', f'FCFLAGS = {debug_options} $(FCBASEOPTS)']
			])
		if version >= Version('4.0'):
			build_target = 'wrfplus'
		else:
			build_target = 'wrf'
		if args.verbose:
			run(f'./compile {build_target}')
		else:
			run(f'./compile {build_target} 1> compile.out 2>&1')

		if check_files(expected_exe_files):
			cli.notice('Succeeded.')
		else:
			if args.verbose:
				cli.error('Failed!')
			else:
				cli.error(f'Failed! Check {wrfplus_root}/compile.out')
	else:
		cli.notice('WRFPLUS is already built.')

	# ---------------------------------------------------------------------------------
	#                                    WRFDA
	os.chdir(wrfda_root)
	os.environ['WRFPLUS_DIR'] = wrfplus_root
	if args.force: run('./clean -a 1> /dev/null 2>&1')
	if Version('3.6.1') <= version <= Version('3.9.1'):
		cli.warning(f'Fix {wrfda_root}/var/da/da_define_structures/da_zero_y.inc')
		edit_file('var/da/da_define_structures/da_zero_y.inc', [
			[', value \)', ', value_ )'],
			[':: value$', ':: value_\nreal value'],
			['if \(.not.\(present\(value\)\)\) value = 0.0', '''
   if (.not.(present(value_))) then
      value = 0.0
   else
      value = value_
   end if
''']
		])
	if version == Version('4.1.1'):
		cli.warning(f'Fix {wrfda_root}/share/input_wrf.F')
		edit_file('share/input_wrf.F', [
			['FUNCTION check_which_switch', 'FUNCTION check_which_switch1']
		])
	expected_exe_files = [
		'var/build/da_advance_time.exe',
		'var/build/da_bias_airmass.exe',
		'var/build/da_bias_scan.exe',
		'var/build/da_bias_sele.exe',
		'var/build/da_bias_verif.exe',
		'var/build/da_rad_diags.exe',
		'var/build/da_tune_obs_desroziers.exe',
		'var/build/da_tune_obs_hollingsworth1.exe',
		'var/build/da_tune_obs_hollingsworth2.exe',
		'var/build/da_update_bc_ad.exe',
		'var/build/da_update_bc.exe',
		'var/build/da_verif_grid.exe',
		'var/build/da_verif_obs.exe',
		'var/build/da_wrfvar.exe',
		'var/build/gen_be_addmean.exe',
		'var/build/gen_be_cov2d3d_contrib.exe',
		'var/build/gen_be_cov2d.exe',
		'var/build/gen_be_cov3d2d_contrib.exe',
		'var/build/gen_be_cov3d3d_bin3d_contrib.exe',
		'var/build/gen_be_cov3d3d_contrib.exe',
		'var/build/gen_be_cov3d.exe',
		'var/build/gen_be_diags.exe',
		'var/build/gen_be_diags_read.exe',
		'var/build/gen_be_ensmean.exe',
		'var/build/gen_be_ensrf.exe',
		'var/build/gen_be_ep1.exe',
		'var/build/gen_be_ep2.exe',
		'var/build/gen_be_etkf.exe',
		'var/build/gen_be_hist.exe',
		'var/build/gen_be_stage0_gsi.exe',
		'var/build/gen_be_stage0_wrf.exe',
		'var/build/gen_be_stage1_1dvar.exe',
		'var/build/gen_be_stage1.exe',
		'var/build/gen_be_stage1_gsi.exe',
		'var/build/gen_be_stage2_1dvar.exe',
		'var/build/gen_be_stage2a.exe',
		'var/build/gen_be_stage2.exe',
		'var/build/gen_be_stage2_gsi.exe',
		'var/build/gen_be_stage3.exe',
		'var/build/gen_be_stage4_global.exe',
		'var/build/gen_be_stage4_regional.exe',
		'var/build/gen_be_vertloc.exe',
		'var/build/gen_mbe_stage2.exe',
		'var/obsproc/src/obsproc.exe']
	if not check_files(expected_exe_files):
		cli.notice('Configure WRFDA ...')
		if args.use_grib:
			cli.notice('Set GRIB2 flag.')
			edit_file('./arch/Config.pl', [
				['\$I_really_want_to_output_grib2_from_WRF = "FALSE"', '$I_really_want_to_output_grib2_from_WRF = "TRUE"']
			])
		child = pexpect.spawn('./configure 4dvar')
		child.expect('Enter selection.*')
		if args.compiler_suite == 'intel':
			child.sendline('8')
		elif args.compiler_suite == 'gnu':
			child.sendline('18')
		elif args.compiler_suite == 'pgi':
			child.sendline('28')
		child.wait()

		if args.compiler_suite == 'intel':
			edit_file('./configure.wrf', [
				['mpif90', 'mpiifort'],
				['mpicc', 'mpiicc']
			])

		# Fix for OpenMPI.
		edit_file('./configure.wrf', [
			['DM_CC\s*=\s*mpicc\s*$', 'DM_CC = mpicc -DMPI2_SUPPORT\n']
		])

		cli.notice('Compile WRFDA ...')
		if args.debug:
			if args.compiler_suite == 'intel':
				debug_options = '-O0 -g -traceback'
			elif args.compiler_suite == 'gnu':
				debug_options = '-O0 -g -fbacktrace'
			edit_file('configure.wrf', [
				['FCFLAGS\s*=\s*\$\(FCOPTIM\)\s*\$\(FCBASEOPTS\)', f'FCFLAGS = {debug_options} $(FCBASEOPTS)']
			])
		if args.verbose:
			run(f'./compile all_wrfvar')
		else:
			run(f'./compile all_wrfvar 1> compile.out 2>&1')

		if check_files(expected_exe_files, fatal=True):
			cli.notice('Succeeded.')
		else:
			if args.verbose:
				cli.error('Failed!')
			else:
				cli.error(f'Failed! Check {wrfda_root}/compile.out')
	else:
		cli.notice('WRFDA is already built.')
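The build steps above lean on an edit_file(path, [[pattern, replacement], ...]) helper from the scripts' utils module. A rough stand-in that shows the regex-substitution idea; the real helper also accepts line numbers (as in the module_wrf_top.F fix) and a return_on_first_match option, which this sketch omits:

import re

def edit_file_sketch(path, replacements):
    # Hypothetical simplification of utils.edit_file: apply each
    # (regex pattern, replacement) pair to the whole file in place.
    with open(path) as f:
        text = f.read()
    for pattern, replacement in replacements:
        text = re.sub(pattern, replacement, text, flags=re.MULTILINE)
    with open(path, 'w') as f:
        f.write(text)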
Example #19
def config_wrfda(work_root,
                 wrfda_root,
                 config,
                 args,
                 wrf_work_dir=None,
                 tag=None,
                 fg=None):
    start_time = config['custom']['start_time']
    end_time = config['custom']['end_time']
    datetime_fmt = 'YYYY-MM-DD_HH:mm:ss'
    start_time_str = start_time.format(datetime_fmt)
    max_dom = config['domains']['max_dom']

    # Need to take some parameters from wrfinput file.
    if not wrf_work_dir:
        if tag != None:
            wrf_work_dir = f'{work_root}/wrf_{tag}'
        else:
            wrf_work_dir = f'{work_root}/wrf'

    if max_dom > 1:
        if not has_key(config, ('custom', 'wrfda', 'dom')):
            cli.error(
                'You need to set custom->wrfda->dom to set which domain to work on!'
            )
        dom_idx = config['custom']['wrfda']['dom']
        dom_str = 'd' + str(dom_idx + 1).zfill(2)
        if tag != None:
            wrfda_work_dir = f'{work_root}/wrfda_{tag}/{dom_str}'
        else:
            wrfda_work_dir = f'{work_root}/wrfda/{dom_str}'
    else:
        dom_idx = 0
        dom_str = 'd01'
        if tag != None:
            wrfda_work_dir = f'{work_root}/wrfda_{tag}'
        else:
            wrfda_work_dir = f'{work_root}/wrfda'
    if not os.path.isdir(wrfda_work_dir): os.makedirs(wrfda_work_dir)
    os.chdir(wrfda_work_dir)

    version = wrf_version(wrfda_root)

    if os.path.isfile(f'{wrf_work_dir}/wrfinput_{dom_str}'):
        f = Dataset(f'{wrf_work_dir}/wrfinput_{dom_str}')
    elif os.path.isfile(f'{wrf_work_dir}/wrfout_{dom_str}_{start_time_str}'):
        f = Dataset(f'{wrf_work_dir}/wrfout_{dom_str}_{start_time_str}')
    elif fg:
        f = Dataset(fg)
    else:
        cli.error(
            f'config_wrfda: Cannot find wrfinput or wrfout in {wrf_work_dir} or wrfvar!'
        )
    num_land_cat = f.getncattr('NUM_LAND_CAT')
    hypsometric_opt = f.getncattr('HYPSOMETRIC_OPT')
    f.close()

    time_window = get_value(config, ('custom', 'wrfda', 'time_window'), 360)
    # Read in namelist template (not exact Fortran namelist format, we need to change it).
    template = open(f'{wrfda_root}/var/README.namelist').read()
    template = re.sub(r'^[^&]*', '', template, flags=re.DOTALL)
    template = re.sub(r';.*', '', template)
    template = re.sub(r'\([^\)]*\)', '', template)
    namelist_input = f90nml.read(StringIO(template))
    namelist_input['wrfvar1']['var4d_lbc'] = False
    namelist_input['wrfvar18']['analysis_date'] = start_time_str
    namelist_input['wrfvar21']['time_window_min'] = start_time.subtract(
        minutes=time_window / 2).format(datetime_fmt)
    namelist_input['wrfvar22']['time_window_max'] = start_time.add(
        minutes=time_window / 2).format(datetime_fmt)
    # Fix bugs
    namelist_input['wrfvar2']['qc_rej_both'] = False
    namelist_input['wrfvar14']['rtminit_satid'] = -1
    namelist_input['wrfvar14']['rtminit_sensor'] = -1
    if version == Version('3.6.1'):
        namelist_input['wrfvar4']['use_iasiobs'] = False
        del namelist_input['wrfvar4']['use_iasisobs']
        namelist_input['wrfvar4']['use_seviriobs'] = False
        del namelist_input['wrfvar4']['use_sevirisobs']
        namelist_input['wrfvar5']['max_omb_spd'] = namelist_input['wrfvar5'][
            'max_omb_sp']
        del namelist_input['wrfvar5']['max_omb_sp']
        namelist_input['wrfvar5']['max_error_spd'] = namelist_input['wrfvar5'][
            'max_error_sp']
        del namelist_input['wrfvar5']['max_error_sp']
    elif version > Version('3.8.1'):
        namelist_input['wrfvar11']['write_detail_grad_fn'] = True
    namelist_input['wrfvar11']['calculate_cg_cost_fn'] = True
    # Merge namelist.input in tutorial.
    tmp = f90nml.read(f'{wrfda_root}/var/test/tutorial/namelist.input')
    for key, value in tmp.items():
        if not key in namelist_input:
            namelist_input[key] = value
    namelist_input['time_control']['run_hours'] = config['custom'][
        'forecast_hours']
    namelist_input['time_control']['start_year'] = [
        int(start_time.format("Y")) for i in range(max_dom)
    ]
    namelist_input['time_control']['start_month'] = [
        int(start_time.format("M")) for i in range(max_dom)
    ]
    namelist_input['time_control']['start_day'] = [
        int(start_time.format("D")) for i in range(max_dom)
    ]
    namelist_input['time_control']['start_hour'] = [
        int(start_time.format("H")) for i in range(max_dom)
    ]
    namelist_input['time_control']['end_year'] = [
        int(end_time.format("Y")) for i in range(max_dom)
    ]
    namelist_input['time_control']['end_month'] = [
        int(end_time.format("M")) for i in range(max_dom)
    ]
    namelist_input['time_control']['end_day'] = [
        int(end_time.format("D")) for i in range(max_dom)
    ]
    namelist_input['time_control']['end_hour'] = [
        int(end_time.format("H")) for i in range(max_dom)
    ]
    namelist_input['time_control']['frames_per_outfile'] = [
        1 for i in range(max_dom)
    ]
    for key, value in config['time_control'].items():
        namelist_input['time_control'][key] = value
    for key, value in config['domains'].items():
        namelist_input['domains'][key] = value
    # WRFDA only takes grid parameters for one domain at a time.
    namelist_input['domains']['max_dom'] = 1
    for key in ('e_we', 'e_sn', 'e_vert', 'dx', 'dy', 'grid_id', 'parent_id',
                'i_parent_start', 'j_parent_start', 'parent_grid_ratio',
                'parent_time_step_ratio'):
        if key in config['domains']:
            namelist_input['domains'][key] = config['domains'][key][dom_idx]
    namelist_input['domains']['hypsometric_opt'] = hypsometric_opt
    # Sync physics parameters.
    if 'physics' in config:
        for key, value in config['physics'].items():
            namelist_input['physics'][key] = value
    namelist_input['physics']['num_land_cat'] = num_land_cat
    if version == Version('3.9.1'):
        namelist_input['dynamics']['gwd_opt'] = 0
    # Write customized parameters.
    for tag in range(1, 23):
        section = f'wrfvar{tag}'
        for key, value in config[section].items():
            namelist_input[section][key] = value

    # Validate some parameters.
    for key in ('as1', 'as2', 'as3', 'as4', 'as5'):
        if namelist_input['wrfvar7'][key] == -1:
            cli.error(f'wrfvar7->{key} is -1!')

    namelist_input.write(f'{wrfda_work_dir}/namelist.input', force=True)

    cli.notice('Succeeded.')
Example #20
    def test_releases(self):
        for name, info in self.releases.items():
            with self.subTest(name=name):
                # We do extra checks in the case a new release is being made. This
                # is because some wraps are not passing all tests but we force making
                # them compliant next time we do a release.
                versions: T.List[str] = info['versions']
                latest_tag = f'{name}_{versions[0]}'
                extra_checks = latest_tag not in self.tags

                # Make sure we can load wrap file
                config = configparser.ConfigParser(interpolation=None)
                config.read(f'subprojects/{name}.wrap')

                # Basic checks
                with self.subTest(step='basic'):
                    self.assertTrue(re.fullmatch('[a-z][a-z0-9._-]*', name))
                    self.assertEqual(config.sections()[0], 'wrap-file')
                    wrap_section = config['wrap-file']
                    self.assertIn('directory', wrap_section)
                    self.check_has_no_path_separators(
                        wrap_section['directory'])
                    self.assertIn('source_filename', wrap_section)
                    self.check_has_no_path_separators(
                        wrap_section['source_filename'])
                    self.assertIn('source_url', wrap_section)
                    self.assertIn('source_hash', wrap_section)

                # FIXME: Not all wraps currently comply, only check for wraps we modify.
                if extra_checks:
                    with self.subTest(step='provide'):
                        self.assertIn('provide', config.sections())
                        self.assertTrue(config.items('provide'))

                patch_path = self.get_patch_path(wrap_section)
                if patch_path:
                    with self.subTest(step='patch_directory'):
                        self.assertTrue(patch_path.is_dir())
                        # FIXME: Not all wraps currently comply, only check for wraps we modify.
                        if extra_checks:
                            self.check_files(name, patch_path)

                # Make sure it has the same deps/progs provided
                with self.subTest(step='have_same_provides'):
                    progs = []
                    deps = []
                    if 'provide' in config.sections():
                        provide = config['provide']
                        progs = [
                            i.strip() for i in provide.get(
                                'program_names', '').split(',')
                        ]
                        deps = [
                            i.strip() for i in provide.get(
                                'dependency_names', '').split(',')
                        ]
                        for k in provide:
                            if k not in {'dependency_names', 'program_names'}:
                                deps.append(k.strip())
                    progs = [i for i in progs if i]
                    deps = [i for i in deps if i]
                    self.assertEqual(sorted(progs),
                                     sorted(info.get('program_names', [])))
                    self.assertEqual(sorted(deps),
                                     sorted(info.get('dependency_names', [])))

                # Verify versions are sorted
                with self.subTest(step='sorted versions'):
                    versions: T.List[str] = info['versions']
                    self.assertGreater(len(versions), 0)
                    versions_obj = [Version(v) for v in versions]
                    self.assertEqual(sorted(versions_obj, reverse=True),
                                     versions_obj)

                # The first version could be a new release, all others must have
                # a corresponding tag already.
                for i, v in enumerate(versions):
                    t = f'{name}_{v}'
                    ver, rev = v.rsplit('-', 1)
                    with self.subTest(step='valid release name'):
                        self.assertTrue(re.fullmatch('[a-z0-9._]+', ver))
                        self.assertTrue(re.fullmatch('[0-9]+', rev))
                    if i == 0:
                        with self.subTest(step='check_source_url'):
                            self.check_source_url(name, wrap_section, ver)
                    if i == 0 and t not in self.tags:
                        with self.subTest(step='check_new_release'):
                            self.check_new_release(name)
                            with self.subTest(
                                    f'If this works now, please remove it from broken_{platform.system().lower()}!'
                            ):
                                self.assertNotIn(name, self.skip)
                            self.check_meson_version(name, ver, patch_path)
                    else:
                        with self.subTest(step='version is tagged'):
                            self.assertIn(t, self.tags)
Example #21
def build_gsi(wrf_root, gsi_root, args):
    # Check environment.
    if not 'HDF5' in os.environ:
        res = subprocess.run(['which', 'h5dump'], stdout=subprocess.PIPE)
        if res.returncode == 0:
            os.environ['HDF5'] = os.path.dirname(
                os.path.dirname(res.stdout.decode('utf-8')))
            cli.notice(f'Set HDF5 to {os.environ["HDF5"]}')
    if not 'HDF5' in os.environ:
        cli.warning('HDF5 environment variable is not set')

    if not 'NETCDF' in os.environ:
        res = subprocess.run(['which', 'ncdump'], stdout=subprocess.PIPE)
        if res.returncode == 0:
            os.environ['NETCDF'] = os.path.dirname(
                os.path.dirname(res.stdout.decode('utf-8')))
            cli.notice(f'Set NETCDF to {os.environ["NETCDF"]}')
    if not 'NETCDF' in os.environ:
        cli.warning('NETCDF environment variable is not set!')

    if not os.getenv('LAPACK_PATH') and args.compiler_suite != 'intel':
        cli.error('Shell variable LAPACK_PATH is not set!')

    version = gsi_version(args.gsi_root)

    if version <= Version('3.6'):
        # 3.7 changes: Added wrf interface as a library (wrflib). No need to compile WRF with GSI and EnKF.
        if not os.path.isdir(args.wrf_root):
            cli.error(f'WRF directory {args.wrf_root} does not exist!')
        os.chdir(args.wrf_root)
        expected_exe_files = ('main/wrf.exe',)
        if not check_files(expected_exe_files):
            cli.error('WRF has not been built! Build it first.')

    os.chdir(args.gsi_root)

    if args.force: run('rm -rf build')
    if not os.path.isdir('build'): os.mkdir('build')
    os.chdir('build')
    if version == Version('3.6'):
        expected_exe_files = ('bin/gsi.x', 'lib/libbacio_v2.0.1.a',
                              'lib/libbufr_v10.2.5.a', 'lib/libcrtm_v2.2.3.a',
                              'lib/libenkfdeplib.a', 'lib/libenkflib.a',
                              'lib/libgsilib_shrd.a', 'lib/libgsilib_wrf.a',
                              'lib/libnemsio_v2.2.1.a',
                              'lib/libsfcio_v1.1.0.a', 'lib/libsigio_v2.0.1.a',
                              'lib/libsp_v2.0.2.a', 'lib/libw3emc_v2.2.0.a',
                              'lib/libw3nco_v2.0.6.a')
    elif version == Version('3.7'):
        expected_exe_files = ('bin/enkf_wrf.x', 'bin/gsi.x',
                              'lib/libbacio_v2.0.1.a', 'lib/libbufr_v10.2.5.a',
                              'lib/libcrtm_v2.2.3.a', 'lib/libenkfdeplib.a',
                              'lib/libenkflib.a', 'lib/libgsilib_shrd.a',
                              'lib/libgsilib_wrf.a', 'lib/libnemsio_v2.2.1.a',
                              'lib/libsfcio_v1.1.0.a', 'lib/libsigio_v2.0.1.a',
                              'lib/libsp_v2.0.2.a', 'lib/libw3emc_v2.2.0.a',
                              'lib/libw3nco_v2.0.6.a')
    if not check_files(expected_exe_files):
        cmake_args = f'-DBUILD_ENKF=ON -DBUILD_CORELIBS=ON -DUSE_WRF=ON -DBUILD_WRF=ON -DBUILD_GFS=OFF '
        if version == Version('3.6'):
            cli.notice('Fix GSI 3.6!')
            edit_file('../cmake/Modules/FindCORELIBS.cmake', [[
                '\${CMAKE_SOURCE_DIR}/libsrc', '${CMAKE_SOURCE_DIR}/lib/libsrc'
            ]])
            if args.compiler_suite == 'gnu':
                edit_file('../cmake/Modules/setCompilerFlags.cmake', [[
                    'set\(BACIO_Fortran_FLAGS " -O3 -fconvert=big-endian -ffree-form',
                    'set(BACIO_Fortran_FLAGS " -O3 -fconvert=big-endian'
                ]])
            elif args.compiler_suite == 'intel':
                edit_file('../cmake/Modules/setCompilerFlags.cmake', [[
                    'set \(BACIO_Fortran_FLAGS "-O3 -free -assume nocc_omp',
                    'set(BACIO_Fortran_FLAGS " -O3 -assume nocc_omp'
                ]])
            edit_file('../core-libs/sigio/CMakeLists.txt',
                      [['\*\.f\)', '*.f90)']])
            edit_file('../src/hybrid_ensemble_isotropic.F90',
                      [['stop\(123\)', 'stop 123']])
            edit_file('../src/setupoz.f90', [[
                'my_head%ij\(1\),my_head%wij\(1\)\)', 'my_head%ij,my_head%wij)'
            ]])
            cmake_args += f'-DWRFPATH={args.wrf_root}'
        if version == Version('3.7'):
            cli.notice('Fix GSI 3.7!')
            edit_file('../src/setuplight.f90',
                      [['my_head%wij\(1\)\)', 'my_head%wij)']])
            cli.warning(
                'GSI 3.7 has bug when rerun cmake, so clean all build files.')
            run('rm -rf ../build/*')
            cmake_args += '-DBUILD_UTIL_COM=ON'

        # Remove -lnetcdf -lnetcdff from the compiler flags; they fail to resolve at link time.
        edit_file('../cmake/Modules/setCompilerFlags.cmake',
                  [['-lnetcdf -lnetcdff', '']])

        cli.notice('Configure GSI ...')
        if args.verbose:
            run(f'CC={cc} CXX={cxx} FC={fc} cmake .. {cmake_args}')
        else:
            run(f'CC={cc} CXX={cxx} FC={fc} cmake .. {cmake_args} &> cmake.out')

        cli.notice('Compile GSI ...')
        if args.verbose:
            run('make')
        else:
            run('make &> make.out')

        if check_files(expected_exe_files):
            cli.notice('Succeeded.')
        else:
            if args.verbose:
                cli.error('Failed')
            else:
                cli.error(f'Failed! Check {args.gsi_root}/build/make.out')
    else:
        cli.notice('GSI has already been built.')

    if version == Version('3.6'):
        os.chdir(f'{args.gsi_root}/util/bufr_tools')
        if args.force: run('make clean')
        expected_exe_files = (
            'bufr_append_sample.exe', 'bufr_decode_radiance.exe',
            'bufr_decode_sample.exe', 'bufr_encode_sample.exe',
            'prepbufr_append_retrieve.exe', 'prepbufr_append_surface.exe',
            'prepbufr_append_upperair.exe', 'prepbufr_decode_all.exe',
            'prepbufr_encode_surface.exe', 'prepbufr_encode_upperair.exe',
            'prepbufr_inventory.exe')
        if not check_files(expected_exe_files):
            edit_file('makefile', [['^\s*FC\s*=.*$', f'FC = {fc}'],
                                   ['-I\.\./\.\./dtc', '-I../../build'],
                                   ['-L\.\./\.\./dtc', '-L../../build'],
                                   ['-lbufr_i4r8', '-lbufr_v10.2.5']])

            cli.notice('Compile bufr_tools ...')
            if args.verbose:
                run('make')
            else:
                run('make &> make.out')

            if check_files(expected_exe_files):
                cli.notice('Succeeded.')
            else:
                if args.verbose:
                    cli.error('Failed!')
                else:
                    cli.error(
                        f'Failed! Check {args.gsi_root}/util/bufr_tools/make.out'
                    )
        else:
            cli.notice('GSI bufr_tools has been built.')

        os.chdir(f'{args.gsi_root}/util/Analysis_Utilities/read_diag/')
        expected_exe_files = ('read_diag_conv.exe', 'read_diag_conv_ens.exe',
                              'read_diag_rad.exe')
        if not check_files(expected_exe_files):
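            # Point the read_diag makefile at the CMake build tree instead of the DTC
            # configure files, and link against the shared GSI library.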
            edit_file('makefile', [
                ['include \.\./\.\./\.\./dtc/configure.gsi', ''],
                ['\$\(SFC\)', fc],
                ['-I\.\./\.\./\.\./dtc', '-I../../../build'],
                ['-L\.\./\.\./\.\./src -lgsi', '-L../../../build/lib -lgsilib_shrd'],
                ['FLAGS= \$\(FFLAGS_DEFAULT\)', 'FLAGS = -fconvert=big-endian']
            ])

            cli.notice('Compile read_diag ...')
            if args.verbose:
                run('make')
            else:
                run('make &> make.out')

            if check_files(expected_exe_files):
                cli.notice('Succeeded.')
            else:
                if args.verbose:
                    cli.error('Failed')
                else:
                    cli.error(
                        f'Failed! Check {args.gsi_root}/util/Analysis_Utilities/read_diag/make.out'
                    )
        else:
            cli.notice('GSI read_diag has been built.')
Beispiel #22
0
def config_wrfda_sens(work_root, wrfda_root, config, args, wrf_work_dir=None):
	if 'wrfda' not in config:
		cli.error('There is no "wrfda" in configuration file!')
	wrfda_config = config['wrfda']
	phys_config = config['physics'] if 'physics' in config else {}

	start_time = config['custom']['start_time']
	end_time = config['custom']['end_time']
	datetime_fmt  = 'YYYY-MM-DD_HH:mm:ss'
	start_time_str = start_time.format(datetime_fmt)
	max_dom = config['domains']['max_dom']

	if not wrf_work_dir: wrf_work_dir = work_root + '/wrf'
	if not os.path.isdir(wrf_work_dir): cli.error(f'{wrf_work_dir} does not exist!')

	wrfda_work_dir = os.path.abspath(work_root) + '/wrfda'
	if not os.path.isdir(wrfda_work_dir): os.mkdir(wrfda_work_dir)
	os.chdir(wrfda_work_dir)

	version = wrf_version(wrfda_root)

	wrfinput = Dataset(f'{wrf_work_dir}/wrfinput_d01_{start_time_str}')
	num_land_cat = wrfinput.getncattr('NUM_LAND_CAT')
	wrfinput.close()
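	# num_land_cat in the namelist must match the land-use classification of the
	# wrfinput file, so it is read from the file's global attributes above.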

	time_window  = config['wrfda']['time_window'] if 'time_window' in config['wrfda'] else 360
	# Read in the namelist template (it is not in exact Fortran namelist format, so it needs cleanup).
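	# Three cleanup passes: drop the descriptive header before the first '&' group,
	# strip ';' inline comments, and remove parenthesised hints such as '(max_dom)'.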
	template = open(f'{wrfda_root}/var/README.namelist').read()
	template = re.sub(r'^[^&]*', '', template, flags=re.DOTALL)
	template = re.sub(r';.*', '', template)
	template = re.sub(r'\([^\)]*\)', '', template)
	namelist_input = f90nml.read(StringIO(template))
	# Merge in namelist.input from the tutorial case.
	tmp = f90nml.read(f'{wrfda_root}/var/test/tutorial/namelist.input')
	for key, value in tmp.items():
		if key not in namelist_input:
			namelist_input[key] = value
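	# Options for the adjoint-sensitivity (FSO) run: the stored Lanczos vectors are
	# read back and the analysis runs in 'QC-OBS' mode over the given time window.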
	namelist_input['wrfvar1']     ['var4d_lbc']                       = False
	namelist_input['wrfvar3']     ['ob_format']                       = wrfda_config['ob_format']
	namelist_input['wrfvar6']     ['orthonorm_gradient']              = True
	namelist_input['wrfvar6']     ['use_lanczos']                     = True
	namelist_input['wrfvar6']     ['read_lanczos']                    = True
	namelist_input['wrfvar17']    ['adj_sens']                        = True
	namelist_input['wrfvar17']    ['sensitivity_option']              = 0
	namelist_input['wrfvar17']    ['analysis_type']                   = 'QC-OBS'
	namelist_input['wrfvar18']    ['analysis_date']                   = start_time_str
	namelist_input['wrfvar21']    ['time_window_min']                 = start_time.subtract(minutes=time_window/2).format(datetime_fmt)
	namelist_input['wrfvar22']    ['time_window_max']                 = start_time.add(minutes=time_window/2).format(datetime_fmt)

	# Fix problematic defaults and sync the time control with the forecast period.
	namelist_input['wrfvar2']     ['qc_rej_both']                     = False
	namelist_input['wrfvar7']     ['cv_options']                      = wrfda_config['cv_options']
	namelist_input['wrfvar14']    ['rtminit_satid']                   = -1
	namelist_input['wrfvar14']    ['rtminit_sensor']                  = -1
	namelist_input['time_control']['run_hours']                       = config['custom']['forecast_hours']
	namelist_input['time_control']['start_year']                      = [int(start_time.format("Y")) for i in range(max_dom)]
	namelist_input['time_control']['start_month']                     = [int(start_time.format("M")) for i in range(max_dom)]
	namelist_input['time_control']['start_day']                       = [int(start_time.format("D")) for i in range(max_dom)]
	namelist_input['time_control']['start_hour']                      = [int(start_time.format("H")) for i in range(max_dom)]
	namelist_input['time_control']['end_year']                        = [int(end_time.format("Y")) for i in range(max_dom)]
	namelist_input['time_control']['end_month']                       = [int(end_time.format("M")) for i in range(max_dom)]
	namelist_input['time_control']['end_day']                         = [int(end_time.format("D")) for i in range(max_dom)]
	namelist_input['time_control']['end_hour']                        = [int(end_time.format("H")) for i in range(max_dom)]
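	# auxinput17 points WRFDA at the gradient input (./gr01), read according to the
	# fso.io_config field list shipped with WRFDA.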
	namelist_input['time_control']['io_form_auxinput17']              = 2
	namelist_input['time_control']['auxinput17_inname']               = './gr01'
	namelist_input['time_control']['iofields_filename']               = f'{wrfda_root}/var/run/fso.io_config'
	for key, value in config['domains'].items():
		namelist_input['domains'][key] = value
	# Sync physics parameters.
	for key, value in phys_config.items():
		namelist_input['physics'][key] = value
	namelist_input['physics']['num_land_cat'] = num_land_cat
	if version == Version('3.9.1'):
		namelist_input['dynamics']['gwd_opt'] = 0

	namelist_input.write(f'{wrfda_work_dir}/namelist.input', force=True)

	cli.notice('Succeeded.')