Example 1
def run_wrfplus_ad(work_root, wrfplus_root, config, args):
	start_time = config['custom']['start_time']
	end_time = config['custom']['end_time']
	datetime_fmt = 'YYYY-MM-DD_HH:mm:ss'
	start_time_str = start_time.format(datetime_fmt)
	max_dom = config['domains']['max_dom']

	wrf_work_dir = os.path.abspath(work_root) + '/wrf'
	if not os.path.isdir(wrf_work_dir):
		cli.error(f'WRF work directory {wrf_work_dir} does not exist!')

	wrfplus_work_dir = os.path.abspath(work_root) + '/wrfplus'
	if not os.path.isdir(wrfplus_work_dir):
		cli.error('WRFPLUS has not been configured! Run config_wrfplus.py first.')
	os.chdir(wrfplus_work_dir)

	if os.path.isfile(f'{wrf_work_dir}/wrfinput_d01_{start_time_str}'):
		run(f'ln -sf {wrf_work_dir}/wrfinput_d01_{start_time_str} wrfinput_d01')
	elif os.path.isfile(f'{wrf_work_dir}/wrfout_d01_{start_time_str}'):
		run(f'ln -sf {wrf_work_dir}/wrfout_d01_{start_time_str} wrfinput_d01')
	run(f'ln -sf {wrf_work_dir}/wrfbdy_d01 .')
	if not os.path.isfile('final_sens_d01'):
		cli.error('There is no final_sens_d01 file!')

	version = wrf_version(wrfplus_root)

	cli.stage(f'Run WRFPLUS at {wrfplus_work_dir} ...')
	expected_files = ['wrfout_d{:02d}_{}'.format(i + 1, start_time_str) for i in range(max_dom)]
	expected_files.append(f'init_sens_d01_{start_time_str}')
	if not check_files(expected_files) or args.force:
		run('rm -f wrfout_*')
		run(f'ln -sf {wrfplus_root}/run/LANDUSE.TBL .')
		run(f'ln -sf {wrfplus_root}/run/VEGPARM.TBL .')
		run(f'ln -sf {wrfplus_root}/run/SOILPARM.TBL .')
		run(f'ln -sf {wrfplus_root}/run/GENPARM.TBL .')
		run(f'ln -sf {wrfplus_root}/run/RRTM_DATA_DBL RRTM_DATA')
		run(f'ln -sf {wrfplus_root}/run/ETAMPNEW_DATA_DBL ETAMPNEW_DATA')
		if version >= Version('4.0'):
			cmd = f'{wrfplus_root}/run/wrfplus.exe'
		else:
			cmd = f'{wrfplus_root}/run/wrf.exe'
		retries = 0
		while True:
			submit_job(cmd, args.np, config, args, wait=True)
			if os.path.isfile(f'gradient_wrfplus_d01_{start_time_str}'):
				run(f'mv gradient_wrfplus_d01_{start_time_str} init_sens_d01_{start_time_str}')
			if not check_files(expected_files):
				if retries == 10:
					cli.error(f'Failed! Check output {os.path.abspath(wrfplus_work_dir)}/rsl.error.0000.')
				retries += 1
				cli.warning('Failed to run wrfplus, retrying!')
			else:
				break
		cli.notice('Succeeded.')
	else:
		cli.notice('Files wrfout_* already exist.')
	run(f'ls -l {wrfplus_work_dir}/wrfout_*')
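The 'YYYY-MM-DD_HH:mm:ss' string used above is token-style formatting (as in pendulum), not a strftime pattern. A minimal sketch, assuming the config times are pendulum datetimes:

# Minimal sketch; assumes config['custom']['start_time'] is a pendulum datetime.
import pendulum

start_time = pendulum.datetime(2023, 1, 1, 6)
start_time.format('YYYY-MM-DD_HH:mm:ss')  # -> '2023-01-01_06:00:00'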
Example 2
def add_to_db(wrap: str, versions: T.List[T.Tuple[str, str]], releases: T.Dict[str, T.Dict[str, T.List[str]]]):
    releases.setdefault(wrap, {})
    releases[wrap].setdefault('versions', [])
    releases[wrap].setdefault('dependency_names', [])
    releases[wrap].setdefault('program_names', [])
    # Sort as Version objects (newest first), then store back as 'version-revision' strings.
    version_objs = sorted((Version(f'{version}-{revision}') for version, revision in versions), reverse=True)
    versions = [v._s for v in version_objs]
    progs, deps = get_provide(wrap)
    releases[wrap]['versions'] = versions
    releases[wrap]['program_names'] = progs
    releases[wrap]['dependency_names'] = deps
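A small usage sketch for add_to_db; the wrap name and version tuples are made up, and get_provide() reads the corresponding wrap file, so this only works inside a wrapdb checkout:

# Hypothetical call; 'zlib' and the version tuples are illustrative only.
releases = {}
add_to_db('zlib', [('1.2.12', '2'), ('1.2.13', '1')], releases)
# Versions end up newest-first as 'version-revision' strings, e.g.
# releases['zlib'] == {'versions': ['1.2.13-1', '1.2.12-2'],
#                      'dependency_names': [...], 'program_names': [...]}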
Example 3
def config_wps(work_root, wps_root, geog_root, config, args):
    if has_key(config, ('custom', 'start_time')):
        start_time = config['custom']['start_time']
        start_time_str = start_time.format('YYYY-MM-DD_HH:mm:ss')
        if not has_key(config, ('custom', 'end_time')):
            cli.error('custom->end_time does not exist in config file!')
        end_time = config['custom']['end_time']
        end_time_str = end_time.format('YYYY-MM-DD_HH:mm:ss')

    if not has_key(config, ('domains', 'max_dom')):
        cli.error('domains->max_dom does not exist in config file!')
    max_dom = config['domains']['max_dom']

    wps_work_dir = work_root + '/wps'
    if not os.path.isdir(wps_work_dir): os.makedirs(wps_work_dir)
    os.chdir(wps_work_dir)

    version = wrf_version(wps_root)
    if version < Version('3.9.1'):
        cli.error(
            f'WPS {version} may not handle GFS data correctly! Please use WPS >= 3.9.1.'
        )

    cli.notice('Edit namelist.wps for WPS.')
    copy(f'{wps_root}/namelist.wps', 'namelist.wps')
    namelist_wps = f90nml.read('namelist.wps')
    namelist_wps['share']['max_dom'] = max_dom
    if has_key(config, ('custom', 'start_time')):
        namelist_wps['share']['start_date'] = [
            start_time_str for i in range(max_dom)
        ]
    if has_key(config, ('custom', 'end_time')):
        namelist_wps['share']['end_date'] = [
            end_time_str if i == 0 else start_time_str for i in range(max_dom)
        ]
    if has_key(config, ('custom', 'background')) and has_key(
            config, ('custom', 'background', 'interval_seconds')):
        namelist_wps['share']['interval_seconds'] = config['custom'][
            'background']['interval_seconds']
    namelist_wps['geogrid']['geog_data_path'] = geog_root
    for key, value in config['geogrid'].items():
        namelist_wps['geogrid'][key] = value
    namelist_wps['geogrid']['opt_geogrid_tbl_path'] = wps_work_dir
    namelist_wps['metgrid']['opt_metgrid_tbl_path'] = wps_work_dir
    if 'metgrid' in config:
        for key, value in config['metgrid'].items():
            namelist_wps['metgrid'][key] = value
    namelist_wps.write('./namelist.wps', force=True)
    run(f'ncl -Q {script_root}/../plots/plot_domains.ncl')
    cli.notice(f'Check {wps_work_dir}/wps_show_dom.pdf for domains.')

    cli.notice('Succeeded.')
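For reference, a config dictionary shaped like the one config_wps reads above might look as follows; the keys and values are illustrative assumptions, not a complete configuration:

# Illustrative shape only; real configurations carry many more keys.
import pendulum

config = {
    'custom': {
        'start_time': pendulum.datetime(2023, 1, 1, 0),
        'end_time': pendulum.datetime(2023, 1, 2, 0),
        'background': {'interval_seconds': 21600},
    },
    'domains': {'max_dom': 2},
    'geogrid': {'dx': 27000, 'dy': 27000},  # copied verbatim into &geogrid
    'metgrid': {},
}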
Example 4
    def test_releases(self):
        # Get the list of git tags
        stdout = subprocess.check_output(['git', 'tag'])
        tags = [t.strip() for t in stdout.decode().splitlines()]

        with open('releases.json', 'r') as f:
            releases = json.load(f)

        # All tags must be in the releases file
        for t in tags:
            name, version = t.rsplit('_', 1)
            self.assertIn(name, releases)
            self.assertIn(version, releases[name]['versions'])

        # Verify keys are sorted
        self.assertEqual(sorted(releases.keys()), list(releases.keys()))

        # Get the list of wraps that have modified packagefiles
        with open(Path.home() / 'files.json', 'r') as f:
            changed_files = json.load(f)
        self.changed_wraps = set()
        for f in changed_files:
            if f.startswith('subprojects/packagefiles'):
                self.changed_wraps.add(f.split('/')[2])

        for name, info in releases.items():
            # Make sure we can load wrap file
            config = configparser.ConfigParser()
            config.read(f'subprojects/{name}.wrap')
            self.assertEqual(config.sections()[0], 'wrap-file')
            wrap_section = config['wrap-file']
            self.check_has_no_path_separators(wrap_section['directory'])
            self.check_has_no_path_separators(wrap_section['source_filename'])

            # Basic checks
            self.assertTrue(re.fullmatch('[a-z][a-z0-9._-]*', name))
            patch_directory = wrap_section.get('patch_directory')
            if patch_directory:
                patch_path = Path('subprojects', 'packagefiles', patch_directory)
                self.assertTrue(patch_path.is_dir())
                # FIXME: Not all wraps currently comply; only check the wraps we modify.
                if name in self.changed_wraps:
                    self.assertTrue(Path(patch_path, 'LICENSE.build').is_file())
                    self.check_files(patch_path)

            # Make sure it has the same deps/progs provided
            progs = []
            deps = []
            if 'provide' in config.sections():
                provide = config['provide']
                progs = [i.strip() for i in provide.get('program_names', '').split(',')]
                deps = [i.strip() for i in provide.get('dependency_names', '').split(',')]
                for k in provide:
                    if k not in {'dependency_names', 'program_names'}:
                        deps.append(k.strip())
            progs = [i for i in progs if i]
            deps = [i for i in deps if i]
            self.assertEqual(sorted(progs), sorted(info['program_names']))
            self.assertEqual(sorted(deps), sorted(info['dependency_names']))

            # Verify versions are sorted
            versions = info['versions']
            self.assertGreater(len(versions), 0)
            versions_obj = [Version(v) for v in versions]
            self.assertEqual(sorted(versions_obj, reverse=True), versions_obj)

            # The first version could be a new release; all the others must
            # already have a corresponding tag.
            for i, v in enumerate(versions):
                t = f'{name}_{v}'
                ver, rev = v.rsplit('-', 1)
                self.assertTrue(re.fullmatch('[a-z0-9._]+', ver))
                self.assertTrue(re.fullmatch('[0-9]+', rev))
                if i == 0:
                    self.check_source_url(name, wrap_section, ver)
                if i == 0 and t not in tags:
                    self.check_new_release(name, info, wrap_section)
                else:
                    self.assertIn(t, tags)
Example 5
def config_wrfda_sens(work_root, wrfda_root, config, args, wrf_work_dir=None):
	if 'wrfda' not in config:
		cli.error('There is no "wrfda" section in the configuration file!')
	wrfda_config = config['wrfda']
	phys_config = config['physics'] if 'physics' in config else {}

	start_time = config['custom']['start_time']
	end_time = config['custom']['end_time']
	datetime_fmt  = 'YYYY-MM-DD_HH:mm:ss'
	start_time_str = start_time.format(datetime_fmt)
	max_dom = config['domains']['max_dom']

	if not wrf_work_dir: wrf_work_dir = work_root + '/wrf'
	if not os.path.isdir(wrf_work_dir): cli.error(f'{wrf_work_dir} does not exist!')

	wrfda_work_dir = os.path.abspath(work_root) + '/wrfda'
	if not os.path.isdir(wrfda_work_dir): os.mkdir(wrfda_work_dir)
	os.chdir(wrfda_work_dir)

	version = wrf_version(wrfda_root)

	wrfinput = Dataset(f'{wrf_work_dir}/wrfinput_d01_{start_time_str}')
	num_land_cat = wrfinput.getncattr('NUM_LAND_CAT')
	wrfinput.close()

	time_window = wrfda_config['time_window'] if 'time_window' in wrfda_config else 360
	# Read the namelist template. README.namelist is not a valid Fortran namelist,
	# so strip the descriptive text before parsing it.
	template = open(f'{wrfda_root}/var/README.namelist').read()
	template = re.sub(r'^[^&]*', '', template, flags=re.DOTALL)
	template = re.sub(r';.*', '', template)
	template = re.sub(r'\([^\)]*\)', '', template)
	namelist_input = f90nml.read(StringIO(template))
	# Merge in the namelist.input from the tutorial case.
	tmp = f90nml.read(f'{wrfda_root}/var/test/tutorial/namelist.input')
	for key, value in tmp.items():
		if key not in namelist_input:
			namelist_input[key] = value
	namelist_input['wrfvar1']     ['var4d_lbc']                       = False
	namelist_input['wrfvar3']     ['ob_format']                       = wrfda_config['ob_format']
	namelist_input['wrfvar6']     ['orthonorm_gradient']              = True
	namelist_input['wrfvar6']     ['use_lanczos']                     = True
	namelist_input['wrfvar6']     ['read_lanczos']                    = True
	namelist_input['wrfvar17']    ['adj_sens']                        = True
	namelist_input['wrfvar17']    ['sensitivity_option']              = 0
	namelist_input['wrfvar17']    ['analysis_type']                   = 'QC-OBS'
	namelist_input['wrfvar18']    ['analysis_date']                   = start_time_str
	namelist_input['wrfvar21']    ['time_window_min']                 = start_time.subtract(minutes=time_window/2).format(datetime_fmt)
	namelist_input['wrfvar22']    ['time_window_max']                 = start_time.add(minutes=time_window/2).format(datetime_fmt)

	# Fix bugs
	namelist_input['wrfvar2']     ['qc_rej_both']                     = False
	namelist_input['wrfvar7']     ['cv_options']                      = wrfda_config['cv_options']
	namelist_input['wrfvar14']    ['rtminit_satid']                   = -1
	namelist_input['wrfvar14']    ['rtminit_sensor']                  = -1
	namelist_input['time_control']['run_hours']                       = config['custom']['forecast_hours']
	namelist_input['time_control']['start_year']                      = [int(start_time.format("Y")) for i in range(max_dom)]
	namelist_input['time_control']['start_month']                     = [int(start_time.format("M")) for i in range(max_dom)]
	namelist_input['time_control']['start_day']                       = [int(start_time.format("D")) for i in range(max_dom)]
	namelist_input['time_control']['start_hour']                      = [int(start_time.format("H")) for i in range(max_dom)]
	namelist_input['time_control']['end_year']                        = [int(end_time.format("Y")) for i in range(max_dom)]
	namelist_input['time_control']['end_month']                       = [int(end_time.format("M")) for i in range(max_dom)]
	namelist_input['time_control']['end_day']                         = [int(end_time.format("D")) for i in range(max_dom)]
	namelist_input['time_control']['end_hour']                        = [int(end_time.format("H")) for i in range(max_dom)]
	namelist_input['time_control']['io_form_auxinput17']              = 2
	namelist_input['time_control']['auxinput17_inname']               = './gr01'
	namelist_input['time_control']['iofields_filename']               = f'{wrfda_root}/var/run/fso.io_config'
	for key, value in config['domains'].items():
		namelist_input['domains'][key] = value
	# Sync physics parameters.
	for key, value in phys_config.items():
		namelist_input['physics'][key] = value
	namelist_input['physics']['num_land_cat'] = num_land_cat
	if version == Version('3.9.1'):
		namelist_input['dynamics']['gwd_opt'] = 0

	namelist_input.write(f'{wrfda_work_dir}/namelist.input', force=True)

	cli.notice('Succeeded.')
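To make the three re.sub calls in config_wrfda_sens concrete, a small sketch on a made-up README-style fragment (the sample text is an assumption, not the actual file contents):

import re
from io import StringIO

import f90nml

sample = 'Intro text before the first group\n&wrfvar6\n max_ext_its(max_outerloop) = 1   ; number of outer loops\n/\n'
text = re.sub(r'^[^&]*', '', sample, flags=re.DOTALL)  # drop everything before the first '&'
text = re.sub(r';.*', '', text)                        # drop ';' inline comments
text = re.sub(r'\([^\)]*\)', '', text)                 # drop '(...)' annotations
namelist = f90nml.read(StringIO(text))                 # plain namelist syntax that f90nml can parse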