def test_error_non_coord_units(self):
     """ Test error raised if units are provided for a non-coordinate
     constraint """
     constraint_dict = {"name": "probability_of_precipitation_rate_above_threshold"}
     units_dict = {"name": "1"}
     with self.assertRaises(CoordinateNotFoundError):
         apply_extraction(self.precip_cube, constraint_dict, units_dict)
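These tests rely on fixtures created in a setUp method that is not shown in this listing: self.precip_cube, self.threshold_coord and self.units_dict. A minimal sketch of what such a fixture could look like follows; the coordinate name, threshold values and grid are assumptions inferred from the assertions in the tests, not taken from the source.

import numpy as np
from iris.coords import DimCoord
from iris.cube import Cube

def make_precip_fixture():
    # Assumed threshold coordinate: three precipitation-rate thresholds stored
    # in m s-1, equivalent to 0.03, 0.1 and 1.0 mm h-1 (1 mm h-1 = 1/3.6e6 m s-1),
    # so units_dict can convert them before matching the test constraints.
    thresholds_mm_h = np.array([0.03, 0.1, 1.0], dtype=np.float32)
    threshold_coord = DimCoord(
        thresholds_mm_h / 3.6e6, long_name="precipitation_rate", units="m s-1"
    )
    latitude = DimCoord(
        np.linspace(50.0, 52.0, 3, dtype=np.float32),
        standard_name="latitude", units="degrees",
    )
    longitude = DimCoord(
        np.linspace(0.0, 2.0, 3, dtype=np.float32),
        standard_name="longitude", units="degrees",
    )
    data = np.arange(27, dtype=np.float32).reshape(3, 3, 3)
    precip_cube = Cube(
        data,
        long_name="probability_of_precipitation_rate_above_threshold",
        units="1",
        dim_coords_and_dims=[(threshold_coord, 0), (latitude, 1), (longitude, 2)],
    )
    threshold_coord_name = "precipitation_rate"
    units_dict = {"precipitation_rate": "mm h-1"}
    return precip_cube, threshold_coord_name, units_dict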
Example #2
 def test_list_constraints(self):
     """ Test that a list of constraints behaves correctly """
     constraint_dict = {self.threshold_coord: [0.1, 1.0]}
     constr = iris.Constraint(**constraint_dict)
     cube = apply_extraction(self.precip_cube, constr, self.units_dict)
     reference_data = self.precip_cube.data[1:, :, :]
     self.assertArrayEqual(cube.data, reference_data)
Example #3
 def test_subset_global_grid_pacific(self):
     """Extract subset of global lat-lon grid over the international date line"""
     global_pacific_cube = set_up_variable_cube(
         self.global_gridded_cube.data.copy(),
         name="screen_temperature",
         units="degC",
         spatial_grid="latlon",
         domain_corner=(0, 175),
         grid_spacing=2,
     )
     lower_bound = -1.0e-7
     upper_bound = 4 + 1.0e-7
     constraint_dict = {
         "latitude": lambda cell: lower_bound <= cell.point <= upper_bound
     }
     constr = iris.Constraint(**constraint_dict)
     expected_data = np.array(
         [[2.0, 3.0, 4.0], [10.0, 11.0, 12.0], [18.0, 19.0, 20.0]]
     )
     result = apply_extraction(
         global_pacific_cube, constr, longitude_constraint=[179.0, 183.0]
     )
     self.assertArrayAlmostEqual(result.data, expected_data)
     self.assertArrayAlmostEqual(
         result.coord("longitude").points, [179.0, 181.0, 183.0]
     )
     self.assertArrayAlmostEqual(result.coord("latitude").points, [0.0, 2.0, 4.0])
Example #4
 def test_subset_global_grid(self):
     """ Extract subset of global lat-lon grid """
     lower_bound = 42 - 1.0e-7
     upper_bound = 52 + 1.0e-7
     constraint_dict = {
         "latitude": lambda cell: lower_bound <= cell.point <= upper_bound
     }
     constr = iris.Constraint(**constraint_dict)
     result = apply_extraction(
         self.global_gridded_cube, constr, longitude_constraint=[0, 7]
     )
     expected_data = np.array(
         [
             [1.0, 2.0, 3.0, 4.0],
             [9.0, 10.0, 11.0, 12.0],
             [17.0, 18.0, 19.0, 20.0],
             [25.0, 26.0, 27.0, 28.0],
         ]
     )
     self.assertArrayAlmostEqual(result.data, expected_data)
     self.assertArrayAlmostEqual(
         result.coord("longitude").points, np.array([0.0, 2.0, 4.0, 6.0])
     )
     self.assertArrayAlmostEqual(
         result.coord("latitude").points, np.array([45.0, 47.0, 49.0, 51.0])
     )
Example #5
 def test_subset_uk_grid(self):
     """ Test subsetting a gridded cube. """
     expected_data = np.array(
         [
             [27.0, 28.0, 29.0, 30.0],
             [35.0, 36.0, 37.0, 38.0],
             [43.0, 44.0, 45.0, 46.0],
             [51.0, 52.0, 53.0, 54.0],
         ]
     )
     expected_points = np.array([1000.0, 3000.0, 5000.0, 7000.0])
     lower_bound = 1000 - 1.0e-7
     upper_bound = 7000 + 1.0e-7
     constraint_dict = {
         "projection_x_coordinate": lambda cell: lower_bound
         <= cell.point
         <= upper_bound,
         "projection_y_coordinate": lambda cell: lower_bound
         <= cell.point
         <= upper_bound,
     }
     constr = iris.Constraint(**constraint_dict)
     result = apply_extraction(self.uk_gridded_cube, constr)
     self.assertArrayAlmostEqual(result.data, expected_data)
     for axis in ["x", "y"]:
         coord = f"projection_{axis}_coordinate"
         self.assertArrayAlmostEqual(result.coord(coord).points, expected_points)
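The gridded tests above similarly assume self.global_gridded_cube and self.uk_gridded_cube fixtures, presumably built with the same set_up_variable_cube helper used in the Pacific test. The sketch below is illustrative only: the import path, data values, domain corners and grid spacings are assumptions and do not reproduce the exact expected arrays asserted above.

import numpy as np
# Assumed import path for the helper already used in the Pacific test above.
from improver.synthetic_data.set_up_test_cubes import set_up_variable_cube

def make_gridded_fixtures():
    # Illustrative global lat-lon cube: 2-degree spacing, data is a simple ramp
    # so extracted subsets are easy to check by eye.
    global_data = np.arange(64, dtype=np.float32).reshape(8, 8)
    global_gridded_cube = set_up_variable_cube(
        global_data,
        name="screen_temperature",
        units="degC",
        spatial_grid="latlon",
        domain_corner=(45, 0),      # assumed corner
        grid_spacing=2,
    )
    # Illustrative UK equal-area cube: 2 km spacing on projection coordinates.
    uk_data = np.arange(64, dtype=np.float32).reshape(8, 8)
    uk_gridded_cube = set_up_variable_cube(
        uk_data,
        name="screen_temperature",
        units="degC",
        spatial_grid="equalarea",
        domain_corner=(0, 0),       # assumed corner
        grid_spacing=2000,
    )
    return global_gridded_cube, uk_gridded_cube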
Example #6
 def test_basic_no_units(self):
     """ Test cube extraction for single constraint without units """
     constraint_dict = {"name": "probability_of_precipitation_rate_above_threshold"}
     constr = iris.Constraint(**constraint_dict)
     cube = apply_extraction(self.precip_cube, constr)
     self.assertIsInstance(cube, iris.cube.Cube)
     reference_data = self.precip_cube.data
     self.assertArrayEqual(cube.data, reference_data)
Example #7
 def test_basic_with_units(self):
     """ Test cube extraction for single constraint with units """
     constraint_dict = {self.threshold_coord: 0.1}
     constr = iris.Constraint(**constraint_dict)
     cube = apply_extraction(self.precip_cube, constr, self.units_dict)
     self.assertIsInstance(cube, iris.cube.Cube)
     self.assertEqual(cube.coord(self.threshold_coord).units, "m s-1")
     reference_data = self.precip_cube.data[1, :, :]
     self.assertArrayEqual(cube.data, reference_data)
Example #8
 def test_range_constraints(self):
     """ Test that a list of constraints behaves correctly. This includes
     converting the units to the units that the constraints is
     defined in."""
     constraint_dict = {"threshold": lambda cell: 0.03 <= cell <= 0.1}
     constr = iris.Constraint(coord_values=constraint_dict)
     cube = apply_extraction(self.precip_cube, constr, self.units_dict)
     reference_data = self.precip_cube.data[:2, :, :]
     self.assertArrayEqual(cube.data, reference_data)
Example #9
 def test_allow_none(self):
     """ Test function returns None rather than raising an error where
     no subcubes match the required constraints, when unit conversion is
     required """
     constraint_dict = {
         "name": "probability_of_precipitation_rate_above_threshold",
         self.threshold_coord: 5,
     }
     constr = iris.Constraint(**constraint_dict)
     cube = apply_extraction(self.precip_cube, constr, self.units_dict)
     self.assertFalse(cube)
Example #10
 def test_multiple_constraints_with_units(self):
     """ Test behaviour with a list of constraints and units """
     constraint_dict = {
         "name": "probability_of_precipitation_rate_above_threshold",
         self.threshold_coord: lambda cell: any(np.isclose(cell.point, [0.03])),
     }
     constr = iris.Constraint(**constraint_dict)
     cube = apply_extraction(self.precip_cube, constr, self.units_dict)
     self.assertIsInstance(cube, iris.cube.Cube)
     reference_data = self.precip_cube.data[0, :, :]
     self.assertArrayEqual(cube.data, reference_data)
Example #11
 def test_multiple_constraints_with_units(self):
     """ Test behaviour with a list of constraints and units """
     constraint_dict = {
         "name": "probability_of_precipitation",
         "threshold": 0.03
     }
     constr = iris.Constraint(**constraint_dict)
     cube = apply_extraction(self.precip_cube, constr, self.units_dict)
     self.assertIsInstance(cube, iris.cube.Cube)
     reference_data = self.precip_cube.data[0, :, :]
     self.assertArrayEqual(cube.data, reference_data)
Example #12
 def test_range_constraints(self):
     """ Test that a list of constraints behaves correctly. This includes
     converting the units to the units that the constraints is
     defined in."""
     lower_bound = 0.03 * (1.0 - 1.0e-7)
     upper_bound = 0.1 * (1.0 + 1.0e-7)
     constraint_dict = {
         self.threshold_coord: lambda cell: lower_bound <= cell <= upper_bound
     }
     constr = iris.Constraint(coord_values=constraint_dict)
     cube = apply_extraction(self.precip_cube, constr, self.units_dict)
     reference_data = self.precip_cube.data[:2, :, :]
     self.assertArrayEqual(cube.data, reference_data)
Example #13
 def test_basic_without_reconverting_units(self):
     """ Test cube extraction for single constraint with units,
      where the coordinates are not reconverted into the original units."""
     constraint_dict = {
         self.threshold_coord: lambda cell: any(np.isclose(cell.point, [0.1]))
     }
     constr = iris.Constraint(**constraint_dict)
     cube = apply_extraction(
         self.precip_cube, constr, self.units_dict, use_original_units=False
     )
     self.assertIsInstance(cube, iris.cube.Cube)
     self.assertEqual(cube.coord(self.threshold_coord).units, "mm h-1")
     reference_data = self.precip_cube.data[1, :, :]
     self.assertArrayEqual(cube.data, reference_data)
Example #14
def process(
    wind_speed: cli.inputcube,
    sigma: cli.inputcube,
    target_orography: cli.inputcube,
    standard_orography: cli.inputcube,
    silhouette_roughness: cli.inputcube,
    vegetative_roughness: cli.inputcube = None,
    *,
    model_resolution: float,
    output_height_level: float = None,
    output_height_level_units="m",
):
    """Wind downscaling.

    Run wind downscaling to apply roughness correction and height correction
    to wind fields as described in Howard and Clark (2007). All inputs must
    be on the same standard grid.

    Args:
        wind_speed (iris.cube.Cube):
            Cube of wind speed on standard grid.
            Any units can be supplied.
        sigma (iris.cube.Cube):
            Cube of standard deviation of model orography height.
            Units of field: m.
        target_orography (iris.cube.Cube):
            Cube of orography to downscale fields to.
            Units of field: m.
        standard_orography (iris.cube.Cube):
            Cube of orography on standard grid (interpolated model orography).
            Units of field: m.
        silhouette_roughness (iris.cube.Cube):
            Cube of model silhouette roughness.
            Units of field: dimensionless.
        vegetative_roughness (iris.cube.Cube):
            Cube of vegetative roughness length.
            Units of field: m.
        model_resolution (float):
            Original resolution of model orography (before interpolation to
            standard grid).
            Units of field: m.
        output_height_level (float):
            If only a single height level is desired as output from
            wind-downscaling, this option can be used to select the height
            level. If no units are provided with 'output_height_level_units',
            metres are assumed.
        output_height_level_units (str):
            If a single height level is selected as output using
            'output_height_level', this additional argument may be used to
            specify the units of the value entered to select the level.
            e.g. hPa.

    Returns:
        iris.cube.Cube:
            The processed Cube.

    Raises:
        ValueError:
            If the requested height value is not found.

    """
    import warnings

    import iris
    from iris.exceptions import CoordinateNotFoundError

    from improver.utilities.cube_extraction import apply_extraction
    from improver.wind_calculations import wind_downscaling

    if output_height_level_units and output_height_level is None:
        warnings.warn(
            "output_height_level_units has been set but no "
            "associated height level has been provided. These units "
            "will have no effect."
        )
    try:
        wind_speed_iterator = wind_speed.slices_over("realization")
    except CoordinateNotFoundError:
        wind_speed_iterator = [wind_speed]
    wind_speed_list = iris.cube.CubeList()
    for wind_speed_slice in wind_speed_iterator:
        result = wind_downscaling.RoughnessCorrection(
            silhouette_roughness,
            sigma,
            target_orography,
            standard_orography,
            model_resolution,
            z0_cube=vegetative_roughness,
            height_levels_cube=None,
        )(wind_speed_slice)
        wind_speed_list.append(result)

    wind_speed = wind_speed_list.merge_cube()
    non_dim_coords = [x.name() for x in wind_speed.coords(dim_coords=False)]
    if "realization" in non_dim_coords:
        wind_speed = iris.util.new_axis(wind_speed, "realization")
    if output_height_level is not None:
        constraints = {"height": output_height_level}
        units = {"height": output_height_level_units}
        single_level = apply_extraction(
            wind_speed, iris.Constraint(**constraints), units
        )
        if not single_level:
            raise ValueError(
                "Requested height level not found, no cube "
                "returned. Available height levels are:\n"
                "{0:}\nin units of {1:}".format(
                    wind_speed.coord("height").points, wind_speed.coord("height").units
                )
            )
        wind_speed = single_level
    return wind_speed
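As a hedged usage sketch (file names and the resolution value are placeholders, and iris.load_cube is used here only as a convenient loader), the process function above could be driven directly from Python as follows:

import iris

# Placeholder file names; in the real CLI these arrive via the improver
# argument framework as cli.inputcube objects.
wind_speed = iris.load_cube("wind_speed.nc")
sigma = iris.load_cube("sigma.nc")
target_orography = iris.load_cube("target_orography.nc")
standard_orography = iris.load_cube("standard_orography.nc")
silhouette_roughness = iris.load_cube("silhouette_roughness.nc")

downscaled = process(
    wind_speed,
    sigma,
    target_orography,
    standard_orography,
    silhouette_roughness,
    model_resolution=1500.0,          # assumed native model resolution in metres
    output_height_level=10.0,         # extract the 10 m wind
    output_height_level_units="m",
)
iris.save(downscaled, "wind_speed_downscaled.nc")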
Example #15
def process(wind_speed, silhouette_roughness, sigma, target_orog,
            standard_orog, model_resolution, height_levels=None,
            veg_roughness_cube=None, output_height_level=None,
            output_height_level_units='m'):
    """Module to run wind downscaling.

    Run wind downscaling to apply roughness correction and height correction
    to wind fields as described in Howard and Clark (2007). All inputs must
    be on the same standard grid.

    Args:
        wind_speed (iris.cube.Cube):
            Cube of wind speed on standard grid.
            Any units can be supplied.
        silhouette_roughness (iris.cube.Cube):
            Cube of model silhouette roughness.
            Units of field: dimensionless.
        sigma (iris.cube.Cube):
            Cube of standard deviation of model orography height.
            Units of field: m.
        target_orog (iris.cube.Cube):
            Cube of orography to downscale fields to.
            Units of field: m.
        standard_orog (iris.cube.Cube):
            Cube of orography on standard grid (interpolated model orography).
            Units of field: m.
        model_resolution (float):
            Original resolution of model orography (before interpolation to
            standard grid).
            Units of field: m.
        height_levels (iris.cube.Cube):
            Cube of height levels coincident with wind direction.
            Units of field: m.
            Default is None.
        veg_roughness_cube (iris.cube.Cube):
            Cube of vegetative roughness length.
            Units of field: m.
            Default is None.
        output_height_level (float):
            If only a single height level is desired as output from
            wind-downscaling, this option can be used to select the height
            level. If no units are provided with 'output_height_level_units',
            metres are assumed.
            Default is None.
        output_height_level_units (str):
            If a single height level is selected as output using
            'output_height_level', this additional argument may be used to
            specify the units of the value entered to select the level.
            e.g. hPa.
            Default is 'm'.

    Returns:
        wind_speed (iris.cube.Cube):
            The processed Cube.

    Raises:
        ValueError:
            If the requested height value is not found.

    """
    if output_height_level_units and output_height_level is None:
        warnings.warn('output_height_level_units has been set but no '
                      'associated height level has been provided. These units '
                      'will have no effect.')
    try:
        wind_speed_iterator = wind_speed.slices_over('realization')
    except CoordinateNotFoundError:
        wind_speed_iterator = [wind_speed]
    wind_speed_list = iris.cube.CubeList()
    for wind_speed_slice in wind_speed_iterator:
        result = (
            wind_downscaling.RoughnessCorrection(
                silhouette_roughness, sigma, target_orog,
                standard_orog, model_resolution,
                z0_cube=veg_roughness_cube,
                height_levels_cube=height_levels).process(wind_speed_slice))
        wind_speed_list.append(result)
    # Temporary fix for chunking problems when merging cubes
    max_npoints = max([np.prod(cube.data.shape) for cube in wind_speed_list])
    while iris._lazy_data._MAX_CHUNK_SIZE < max_npoints:
        iris._lazy_data._MAX_CHUNK_SIZE *= 2
    wind_speed = wind_speed_list.merge_cube()
    non_dim_coords = [x.name() for x in wind_speed.coords(dim_coords=False)]
    if 'realization' in non_dim_coords:
        wind_speed = iris.util.new_axis(wind_speed, 'realization')
    if output_height_level is not None:
        constraints = {'height': output_height_level}
        units = {'height': output_height_level_units}
        single_level = apply_extraction(
            wind_speed, iris.Constraint(**constraints), units)
        if not single_level:
            raise ValueError(
                'Requested height level not found, no cube '
                'returned. Available height levels are:\n'
                '{0:}\nin units of {1:}'.format(
                    wind_speed.coord('height').points,
                    wind_speed.coord('height').units))
        wind_speed = single_level
    return wind_speed
def main(argv=None):
    """Load in arguments and get going."""
    parser = ArgParser(
        description='Run wind downscaling to apply roughness correction and'
        ' height correction to wind fields (as described in'
        ' Howard and Clark [2007]). All inputs must be on the same'
        ' standard grid')
    parser.add_argument('wind_speed_filepath',
                        metavar='WIND_SPEED_FILE',
                        help='Location of the wind speed on standard grid'
                        ' file. Any units can be supplied.')
    parser.add_argument('silhouette_roughness_filepath',
                        metavar='AOS_FILE',
                        help='Location of model silhouette roughness file. '
                        'Units of field: dimensionless')
    parser.add_argument('sigma_filepath',
                        metavar='SIGMA_FILE',
                        help='Location of standard deviation of model '
                        'orography height file. Units of field: m')
    parser.add_argument('target_orog_filepath',
                        metavar='TARGET_OROGRAPHY_FILE',
                        help='Location of target orography file to downscale'
                        ' fields to.'
                        ' Units of field: m')
    parser.add_argument('standard_orog_filepath',
                        metavar='STANDARD_OROGRAPHY_FILE',
                        help='Location of orography on standard grid file '
                        '(interpolated model orography).'
                        ' Units of field: m')
    parser.add_argument('model_resolution',
                        metavar='MODEL_RESOLUTION',
                        help='Original resolution of model orography (before'
                        ' interpolation to standard grid).'
                        ' Units of field: m')
    parser.add_argument('output_filepath',
                        metavar='OUTPUT_FILE',
                        help='The output path for the processed NetCDF')
    parser.add_argument('--output_height_level',
                        metavar='OUTPUT_HEIGHT_LEVEL',
                        default=None,
                        help='If only a single height level is desired as '
                        'output from wind-downscaling, this option can be '
                        'used to select the height level. If no units are '
                        'provided with the --output_height_level_units '
                        'option, metres are assumed.')
    parser.add_argument('--output_height_level_units',
                        metavar='OUTPUT_HEIGHT_LEVEL_UNITS',
                        default='m',
                        help='If a single height level is selected as output '
                        'using the --output_height_level option, this '
                        'additional argument may be used to specify the units '
                        'of the value entered to select the level. e.g. hPa')
    parser.add_argument('--height_levels_filepath',
                        metavar='HEIGHT_LEVELS_FILE',
                        help='Location of file containing height levels '
                        'coincident with wind speed field.')
    parser.add_argument('--veg_roughness_filepath',
                        metavar='VEGETATIVE_ROUGHNESS_LENGTH_FILE',
                        help='Location of vegetative roughness length file.'
                        ' Units of field: m')
    args = parser.parse_args(args=argv)

    if args.output_height_level_units and not args.output_height_level:
        warnings.warn('--output_height_level_units has been set but no '
                      'associated height level has been provided. These units '
                      'will have no effect.')

    wind_speed = load_cube(args.wind_speed_filepath)
    silhouette_roughness_filepath = load_cube(
        args.silhouette_roughness_filepath)
    sigma = load_cube(args.sigma_filepath)
    target_orog = load_cube(args.target_orog_filepath)
    standard_orog = load_cube(args.standard_orog_filepath)
    if args.height_levels_filepath:
        height_levels = load_cube(args.height_levels_filepath)
    else:
        height_levels = None
    if args.veg_roughness_filepath:
        veg_roughness_cube = load_cube(args.veg_roughness_filepath)
    else:
        veg_roughness_cube = None
    try:
        wind_speed_iterator = wind_speed.slices_over('realization')
    except CoordinateNotFoundError:
        wind_speed_iterator = [wind_speed]
    wind_speed_list = iris.cube.CubeList()
    for wind_speed_slice in wind_speed_iterator:
        result = (wind_downscaling.RoughnessCorrection(
            silhouette_roughness_filepath,
            sigma,
            target_orog,
            standard_orog,
            float(args.model_resolution),
            z0_cube=veg_roughness_cube,
            height_levels_cube=height_levels).process(wind_speed_slice))
        wind_speed_list.append(result)

    # Temporary fix for chunking problems when merging cubes
    max_npoints = max([np.prod(cube.data.shape) for cube in wind_speed_list])
    while iris._lazy_data._MAX_CHUNK_SIZE < max_npoints:
        iris._lazy_data._MAX_CHUNK_SIZE *= 2

    wind_speed = wind_speed_list.merge_cube()
    non_dim_coords = [x.name() for x in wind_speed.coords(dim_coords=False)]
    if 'realization' in non_dim_coords:
        wind_speed = iris.util.new_axis(wind_speed, 'realization')

    if args.output_height_level:
        constraints = {'height': float(args.output_height_level)}
        units = {'height': args.output_height_level_units}
        single_level = apply_extraction(wind_speed,
                                        iris.Constraint(**constraints), units)
        if not single_level:
            raise ValueError('Requested height level not found, no cube '
                             'returned. Available height levels are:\n'
                             '{0:}\nin units of {1:}'.format(
                                 wind_speed.coord('height').points,
                                 wind_speed.coord('height').units))
        wind_speed = single_level

    save_netcdf(wind_speed, args.output_filepath)
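A hedged invocation sketch for this legacy entry point, passing an explicit argument list to main(); the file names and the resolution are placeholders, but the argument order mirrors the parser defined above:

# Placeholder inputs; positional order follows the add_argument calls above.
main([
    "wind_speed.nc",
    "silhouette_roughness.nc",
    "sigma.nc",
    "target_orography.nc",
    "standard_orography.nc",
    "1500",                      # model resolution in metres (placeholder)
    "downscaled_wind_speed.nc",
    "--output_height_level", "10",
    "--output_height_level_units", "m",
    "--veg_roughness_filepath", "vegetative_roughness.nc",
])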