Example #1
0
    def update_wrf_namelist(self):
        """Regenerate the namelist patch from the project and apply it to the
        WRF namelist file on disk, preserving user-edited per-domain arrays
        whose length already matches the current number of domains."""
        from gis4wrf.core.transforms.project_to_wrf_namelist import convert_project_to_wrf_namelist

        self.fill_domains()
        nml_patch = convert_project_to_wrf_namelist(self)

        # The user may freely edit the max_dom-sized variables below; we only
        # patch them when their length is wrong. A wrong length typically
        # happens right after the template namelist from the WRF distribution
        # is copied while the project uses nested domains, since the template
        # assumes a single domain. An existing array of the wrong length is
        # truncated or extended, where extension repeats its last value.
        user_editable = {
            'time_control': ['history_interval', 'frames_per_outfile', 'input_from_file'],
            'domains': ['e_vert'],
        }
        nml_path = self.wrf_namelist_path
        nml_old = read_namelist(nml_path, 'wrf')
        for group_name, var_names in user_editable.items():
            if group_name not in nml_old:
                continue
            group_old = nml_old[group_name]
            # Alias the patch group so mutations below hit nml_patch directly.
            group_patch = nml_patch[group_name]
            for var_name in var_names:
                if var_name not in group_old:
                    continue
                current = group_old[var_name]
                wanted_size = len(group_patch[var_name])
                if len(current) == wanted_size:
                    # Length already correct: keep the user's values untouched.
                    logger.debug(
                        f'{nml_path}: size of {group_name}/{var_name} as expected, skipping patch')
                    del group_patch[var_name]
                elif len(current) < wanted_size:
                    logger.debug(
                        f'{nml_path}: size of {group_name}/{var_name} smaller than expected,'
                        f' extending to correct size by repeating last array value {current[-1]}')
                    group_patch[var_name] = current + [current[-1]] * (wanted_size - len(current))
                else:
                    logger.debug(
                        f'{nml_path}: size of {group_name}/{var_name} bigger than expected,'
                        ' truncating to correct size')
                    group_patch[var_name] = current[:wanted_size]

        patch_namelist(nml_path, nml_patch)
Example #2
0
def patch_namelist(path: str,
                   patch: dict,
                   delete_vars: List[str] = None) -> None:
    """Read the namelist at *path*, overlay *patch* onto it and write it back.

    Groups missing from the existing namelist are inserted wholesale from the
    patch; for groups that already exist, each patched variable overwrites the
    existing one and any variable named in *delete_vars* is removed (ignored
    if absent).

    :param path: path of the namelist file to modify in place
    :param patch: mapping of group name -> {variable name -> value}
    :param delete_vars: variable names to drop from every patched group;
        defaults to none. The default is ``None`` (implicit-optional style)
        instead of a mutable ``[]``, which would be a single list object
        shared between all calls.
    """
    if delete_vars is None:
        delete_vars = []
    logger.debug(f'patching {path}')
    nml = read_namelist(path)
    for group_name, group_patch in patch.items():
        if group_name not in nml:
            # Whole group absent: take it verbatim from the patch.
            logger.debug(
                f'{path}: group {group_name} not found, inserting from patch')
            nml[group_name] = group_patch
            continue
        for var_name, val in group_patch.items():
            logger.debug(f'{path}: patching {group_name}/{var_name} = {val}')
            nml[group_name][var_name] = val
        for var_name in delete_vars:
            try:
                del nml[group_name][var_name]
                # Logged only after a successful delete, so absent vars
                # produce no log entry.
                logger.debug(f'{path}: removing {group_name}/{var_name}')
            except KeyError:
                pass
    nml.indent = 0
    nml.write(path, force=True)
def convert_project_to_wrf_namelist(project: Project) -> dict:
    """Build a WRF namelist patch (nested ordered dicts of group -> variables)
    from the project configuration and from metadata stored in the geogrid and
    metgrid output files.

    :raises RuntimeError: if no meteorological dataset is selected, or if the
        geogrid/metgrid output files are missing (they must be produced first).
    """
    wrf = OrderedDict() # type: dict

    try:
        met_spec = project.met_dataset_spec
    except KeyError:
        raise RuntimeError('Meteorological data not selected')

    geogrid_nc = [os.path.join(project.run_wps_folder, 'geo_em.d{:02d}.nc'.format(i))
                  for i in range(1, project.domain_count + 1)]
    if not all(map(os.path.exists, geogrid_nc)):
        raise RuntimeError('Geogrid output files not found, run geogrid first')

    dx = [] # type: List[float]
    dy = [] # type: List[float]
    for path in geogrid_nc:
        # Context manager guarantees the dataset is closed even if an
        # attribute read raises (previously the handle leaked on error).
        with nc.Dataset(path) as ds:
            dx.append(ds.getncattr('DX'))
            dy.append(ds.getncattr('DY'))
            # NOTE(review): overwritten on every iteration, so only the last
            # domain's NUM_LAND_CAT is used below — presumably identical for
            # all domains; confirm against geogrid output.
            num_land_cat = ds.getncattr('NUM_LAND_CAT')
        logger.debug(f'read metadata from {path}: DX={dx[-1]}, DY={dy[-1]}, NUM_LAND_CAT={num_land_cat}')

    metgrid_nc = glob.glob(os.path.join(project.run_wps_folder, 'met_em.d01.*.nc'))
    if not metgrid_nc:
        raise RuntimeError('Metgrid output files not found, run metgrid first')
    # Vertical level counts are read from an arbitrary metgrid output file of
    # the outermost domain; they are assumed constant across all time steps.
    with nc.Dataset(metgrid_nc[0]) as ds:
        num_metgrid_levels = ds.dimensions['num_metgrid_levels'].size
        num_metgrid_soil_levels = ds.getncattr('NUM_METGRID_SOIL_LEVELS')
    logger.debug(f'read metadata from {metgrid_nc[0]}: num_metgrid_levels={num_metgrid_levels}, ' +
                 f'NUM_METGRID_SOIL_LEVELS={num_metgrid_soil_levels}')

    domains = project.data['domains']
    num_domains = len(domains)
    assert num_domains > 0

    start, end = met_spec['time_range']
    wrf['time_control'] = OrderedDict(
        start_year = [start.year] * num_domains,
        start_month = [start.month] * num_domains,
        start_day = [start.day] * num_domains,
        start_hour = [start.hour] * num_domains,
        start_minute = [start.minute] * num_domains,
        start_second = [start.second] * num_domains,
        end_year = [end.year] * num_domains,
        end_month = [end.month] * num_domains,
        end_day = [end.day] * num_domains,
        end_hour = [end.hour] * num_domains,
        end_minute = [end.minute] * num_domains,
        end_second = [end.second] * num_domains,
        interval_seconds = met_spec['interval_seconds'],
        history_interval = [60] * num_domains,
        frames_per_outfile = [100] * num_domains,
        input_from_file = [True] * num_domains,
        nocolons = True
    )

    # NOTE(review): project.data['domains'] appears to be stored innermost
    # first — the reversed slices below emit outermost-first order as WRF
    # expects; confirm against the project data model.
    parent_grid_ratio = [1] + [domain['parent_cell_size_ratio'] for domain in domains[:0:-1]]
    wrf['domains'] = OrderedDict(
        max_dom = num_domains,
        grid_id = list(range(1, num_domains + 1)),
        parent_id = [1] + list(range(1, num_domains)),
        parent_grid_ratio = parent_grid_ratio,
        parent_time_step_ratio = parent_grid_ratio,
        i_parent_start = [domain['parent_start'][0] for domain in domains[::-1]],
        j_parent_start = [domain['parent_start'][1] for domain in domains[::-1]],
        # e_we and e_sn represent the number of velocity points (i.e., u-staggered or v-staggered points)
        # which is one more than the number of cells in each dimension.
        e_we = [domain['domain_size'][0] + domain['padding_left'] + domain['padding_right'] + 1 for domain in domains[::-1]],
        e_sn = [domain['domain_size'][1] + domain['padding_bottom'] + domain['padding_top'] + 1 for domain in domains[::-1]],
        e_vert = [30] * num_domains,
        # dx/dy is not the same as in the WPS namelist, instead it is always meters
        # and is written to the geogrid output files (see above).
        dx = dx,
        dy = dy,
        num_metgrid_levels = num_metgrid_levels,
        num_metgrid_soil_levels = num_metgrid_soil_levels
    )

    wrf['physics'] = OrderedDict(
        num_land_cat = num_land_cat
    )

    return wrf