Example #1
    def setUp(self):
        """Create a class-object containing the necessary cubes.
        All cubes are on the target grid. Here this is defined as a 5x5 grid.
        The cubes have values of one everywhere except:
        input_land: zeroes (sea points) at [0, 1], [4, 4]
        output_land: zeroes (sea points) at [0, 0], [1, 1]
        input_cube: 0. at [1, 1]; 0.5 at [0, 1]; 0.1 at [4, 4]
        These should trigger all the behavior we expect.
        """
        self.plugin = AdjustLandSeaPoints(vicinity_radius=2200.0)

        self.output_land = squeeze(
            set_up_cube(
                num_grid_points=5, zero_point_indices=((0, 0, 1, 1), (0, 0, 0, 0))
            )
        )

        self.cube = squeeze(
            set_up_cube(num_grid_points=5, zero_point_indices=((0, 0, 1, 1),))
        )
        self.cube.data[0, 1] = 0.5
        self.cube.data[4, 4] = 0.1

        self.input_land = squeeze(
            set_up_cube(
                num_grid_points=5, zero_point_indices=((0, 0, 0, 1), (0, 0, 4, 4))
            )
        )

        # Lat-lon coords for reprojection
        # These coords result in a 1:1 regridding with the above cubes.
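        # NOTE: ELLIPSOID is a coordinate system defined at module level in
        # the original test file (not shown in this snippet).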
        x_coord = DimCoord(
            np.linspace(-3.281, -3.153, 5),
            standard_name="longitude",
            units="degrees",
            coord_system=ELLIPSOID,
        )
        y_coord = DimCoord(
            np.linspace(54.896, 54.971, 5),
            standard_name="latitude",
            units="degrees",
            coord_system=ELLIPSOID,
        )
        self.input_land_ll = Cube(
            self.input_land.data,
            long_name="land_sea_mask",
            units="1",
            dim_coords_and_dims=[(y_coord, 0), (x_coord, 1)],
        )
Example #2
 def setUp(self):
     """
     Set up a basic cube and linear weights cube for the process
     method. Input cube has 2 thresholds and 3 forecast_reference_times
     """
     thresholds = [10, 20]
     data = np.ones((2, 2, 3), dtype=np.float32)
     cycle1 = set_up_probability_cube(
         data,
         thresholds,
         spatial_grid="equalarea",
         time=datetime(2017, 11, 10, 4, 0),
         frt=datetime(2017, 11, 10, 0, 0),
     )
     cycle2 = set_up_probability_cube(
         data,
         thresholds,
         spatial_grid="equalarea",
         time=datetime(2017, 11, 10, 4, 0),
         frt=datetime(2017, 11, 10, 1, 0),
     )
     cycle3 = set_up_probability_cube(
         data,
         thresholds,
         spatial_grid="equalarea",
         time=datetime(2017, 11, 10, 4, 0),
         frt=datetime(2017, 11, 10, 2, 0),
     )
     self.cube_to_collapse = CubeList([cycle1, cycle2, cycle3]).merge_cube()
     self.cube_to_collapse = squeeze(self.cube_to_collapse)
     self.cube_to_collapse.rename("weights")
     # This input array has 3 forecast reference times and 2 thresholds.
     # The two thresholds have the same weights.
     self.cube_to_collapse.data = np.array(
         [
             [[[1, 0, 1], [1, 1, 1]], [[1, 0, 1], [1, 1, 1]]],
             [[[0, 0, 1], [0, 1, 1]], [[0, 0, 1], [0, 1, 1]]],
             [[[1, 1, 1], [1, 1, 1]], [[1, 1, 1], [1, 1, 1]]],
         ],
         dtype=np.float32,
     )
     self.cube_to_collapse.data = np.ma.masked_equal(self.cube_to_collapse.data, 0)
     # Create a one_dimensional weights cube by slicing the larger
     # weights cube.
     # The resulting cube only has a forecast_reference_time coordinate.
     self.one_dimensional_weights_cube = self.cube_to_collapse[:, 0, 0, 0]
     self.one_dimensional_weights_cube.remove_coord("projection_x_coordinate")
     self.one_dimensional_weights_cube.remove_coord("projection_y_coordinate")
     self.one_dimensional_weights_cube.remove_coord(
         find_threshold_coordinate(self.one_dimensional_weights_cube)
     )
     self.one_dimensional_weights_cube.data = np.array(
         [0.2, 0.5, 0.3], dtype=np.float32
     )
     self.plugin = SpatiallyVaryingWeightsFromMask(
         "forecast_reference_time", fuzzy_length=2
     )
     self.plugin_no_fuzzy = SpatiallyVaryingWeightsFromMask(
         "forecast_reference_time", fuzzy_length=1
     )
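The masking step above relies on np.ma.masked_equal, which masks every element equal to the given value. A quick standalone check:

import numpy as np
weights = np.ma.masked_equal(np.array([1, 0, 1], dtype=np.float32), 0)
print(weights)       # [1.0 -- 1.0]
print(weights.mask)  # [False  True False]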
Example #3
 def load_multiple_files_callback(cube, field, filename):
     from iris.util import squeeze
     # We need to remove these global attributes when reading multiple files so that the cubes can be properly merged
     cube.attributes.pop('host_name', None)
     cube.attributes.pop('date_time', None)
     cube.attributes.pop('history', None)
     return squeeze(cube)
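Callbacks like this are passed to iris.load, which calls them once for each cube as it is loaded. A hedged usage sketch (the file pattern is illustrative):

import iris
# With the per-file attributes removed, cubes from the separate files can
# be merged into a single cube during loading.
cubes = iris.load("/path/to/model_output_*.nc", callback=load_multiple_files_callback)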
Example #4
 def setUp(self):
     """Set up an example cube to test with"""
     thresholds = [10]
     data = np.ones((1, 2, 3), dtype=np.float32)
     cycle1 = set_up_probability_cube(
         data,
         thresholds,
         time=datetime(2017, 11, 10, 4, 0),
         frt=datetime(2017, 11, 10, 0, 0),
     )
     cycle2 = set_up_probability_cube(
         data,
         thresholds,
         time=datetime(2017, 11, 10, 4, 0),
         frt=datetime(2017, 11, 10, 1, 0),
     )
     cycle3 = set_up_probability_cube(
         data,
         thresholds,
         time=datetime(2017, 11, 10, 4, 0),
         frt=datetime(2017, 11, 10, 2, 0),
     )
     self.cube = CubeList([cycle1, cycle2, cycle3]).merge_cube()
     self.cube = squeeze(self.cube)
     self.plugin = SpatiallyVaryingWeightsFromMask()
Example #5
File: utils.py Project: cedadev/cis
def squeeze(data):
    from iris.cube import Cube
    from iris.util import squeeze
    from cis.data_io.gridded_data import make_from_cube
    if isinstance(data, Cube):
        return make_from_cube(squeeze(data))
    else:
        return data
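For reference, this is the behaviour the wrapper relies on: iris.util.squeeze removes length-one dimensions from a cube while keeping their coordinates as scalar coordinates. A minimal standalone sketch (the coordinate names and values are illustrative):

import numpy as np
from iris.cube import Cube
from iris.coords import DimCoord
from iris.util import squeeze

time = DimCoord([0.0], standard_name="time", units="hours since 1970-01-01 00:00:00")
lat = DimCoord(np.arange(3.0), standard_name="latitude", units="degrees")
cube = Cube(np.ones((1, 3)), dim_coords_and_dims=[(time, 0), (lat, 1)])

squeezed = squeeze(cube)
print(cube.shape, squeezed.shape)  # (1, 3) (3,)
print(squeezed.coord("time"))      # "time" survives as a scalar coordinate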
Example #6
File: HadGEM.py Project: cpaulik/cis
 def load_multiple_files_callback(cube, field, filename):
     from iris.util import squeeze
     # We need to remove the history field when reading multiple files so that the cubes can be properly merged
     cube.attributes.pop('history')
     # cube.coord(name_or_coord='Hybrid height').attributes['formula_terms'] = 'a: lev b: b orog: orog'
     # We also need to remove the length one time dimension so that the cube can be merged correctly (iris preserves
     #  the value as a scalar which then gets converted back into a full coordinate again on merge).
     return squeeze(cube)
Example #7
 def test_ice_large_with_fc(self):
     """Test that large VII probs do increase zero lightning risk when
     forecast lead time is non-zero (two forecast_period points)"""
     self.ice_cube.data[:, 1, 1] = 1.
     self.fg_cube.data[0, 1, 1] = 0.
     self.fg_cube.coord('forecast_period').points = [1.]  # hours
     fg_cube_next = self.fg_cube.copy()
     time_pt, = self.fg_cube.coord('time').points
     fg_cube_next.coord('time').points = [time_pt + 2.]  # hours
     fg_cube_next.coord('forecast_period').points = [3.]  # hours
     self.fg_cube = CubeList([squeeze(self.fg_cube),
                              squeeze(fg_cube_next)]).merge_cube()
     expected = self.fg_cube.copy()
     # expected.data contains all ones except:
     expected.data[0, 1, 1] = 0.54
     expected.data[1, 1, 1] = 0.0
     result = self.plugin.apply_ice(self.fg_cube, self.ice_cube)
     self.assertArrayAlmostEqual(result.data, expected.data)
Example #8
 def setUp(self):
     """
      Set up a basic weights cube with 2 thresholds to multiply with
     a cube with one_dimensional weights.
     """
     thresholds = [10, 20]
     data = np.ones((2, 2, 3), dtype=np.float32)
     cycle1 = set_up_probability_cube(
         data,
         thresholds,
         spatial_grid="equalarea",
         time=datetime(2017, 11, 10, 4, 0),
         frt=datetime(2017, 11, 10, 0, 0),
     )
     cycle2 = set_up_probability_cube(
         data,
         thresholds,
         spatial_grid="equalarea",
         time=datetime(2017, 11, 10, 4, 0),
         frt=datetime(2017, 11, 10, 1, 0),
     )
     cycle3 = set_up_probability_cube(
         data,
         thresholds,
         spatial_grid="equalarea",
         time=datetime(2017, 11, 10, 4, 0),
         frt=datetime(2017, 11, 10, 2, 0),
     )
     self.spatial_weights_cube = CubeList([cycle1, cycle2,
                                           cycle3]).merge_cube()
     self.spatial_weights_cube = squeeze(self.spatial_weights_cube)
     self.spatial_weights_cube.rename("weights")
     # This input array has 3 forecast reference times and 2 thresholds.
     # The two thresholds have the same weights.
     self.spatial_weights_cube.data = np.array(
         [
             [[[1, 0, 1], [1, 0, 1]], [[1, 0, 1], [1, 0, 1]]],
             [[[0, 0, 1], [0, 0, 1]], [[0, 0, 1], [0, 0, 1]]],
             [[[1, 1, 1], [1, 1, 1]], [[1, 1, 1], [1, 1, 1]]],
         ],
         dtype=np.float32,
     )
     # Create a one_dimensional weights cube by slicing the
     # larger weights cube.
     # The resulting cube only has a forecast_reference_time coordinate.
     self.one_dimensional_weights_cube = self.spatial_weights_cube[:, 0, 0,
                                                                   0]
     self.one_dimensional_weights_cube.remove_coord(
         "projection_x_coordinate")
     self.one_dimensional_weights_cube.remove_coord(
         "projection_y_coordinate")
     self.one_dimensional_weights_cube.remove_coord(
         find_threshold_coordinate(self.one_dimensional_weights_cube))
     self.one_dimensional_weights_cube.data = np.array([0.2, 0.5, 0.3],
                                                       dtype=np.float32)
     self.plugin = SpatiallyVaryingWeightsFromMask()
Example #9
File: plot.py Project: cpaulik/cis
 def _remove_length_one_dimensions(self, packed_data):
     from iris.util import squeeze
     from cis.data_io.gridded_data import GriddedData
     # listify (imported at module level in the original source) wraps a
     # bare data object in a list; its result must be reassigned here.
     packed_data = listify(packed_data)
     new_data_list = []
     for data in packed_data:
         if data.is_gridded:
             new_data_list.append(GriddedData.make_from_cube(squeeze(data)))
         else:
             new_data_list.append(data)
     return new_data_list
Example #10
def main():
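    # docopt parses the command-line arguments against the module docstring
    # (not shown in this snippet).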
    arguments = docopt.docopt(__doc__)

    print(arguments)

    cubes = iris.cube.CubeList(
        [squeeze(cube) for cube in irise.load(arguments["<model_data>"])])

    with open(arguments["<yaml_file>"]) as f:
        info = yaml.safe_load(f)

    generate_overview(cubes, info, path=arguments["--output_path"])
Example #11
    def test_not_in_vicinity(self):
        """Test for no change if the matching point is too far away."""
        # We need larger arrays for this.
        # Define 5 x 5 arrays with output sea point at [1, 1] and input sea
        # point at [4, 4]. The alternative value of 0.5 at [4, 4] should not
        # be selected with a small vicinity_radius.
        self.plugin = AdjustLandSeaPoints(vicinity_radius=2200.0)
        cube = squeeze(
            set_up_cube(num_grid_points=5,
                        zero_point_indices=((0, 0, 1, 1), )))
        self.plugin.output_land = cube.copy()
        self.plugin.nearest_cube = cube.copy()
        self.plugin.nearest_cube.data[4, 4] = 0.5
        self.plugin.output_cube = self.plugin.nearest_cube.copy()
        self.plugin.input_land = squeeze(
            set_up_cube(num_grid_points=5,
                        zero_point_indices=((0, 0, 4, 4), )))

        output_cube = self.plugin.output_cube.copy()
        self.plugin.correct_where_input_true(0)
        self.assertArrayEqual(output_cube.data, self.plugin.output_cube.data)
Example #12
 def setUp(self):
     """Create a class-object containing the necessary cubes.
     All cubes are on the target grid. Here this is defined as a 3x3 grid.
     The grid contains ones everywhere except the centre point (a zero).
     The output_cube has a value of 0.5 at [0, 1].
     The move_sea_point cube has the zero value at [0, 1] instead of [1, 1],
      which allows it to be used in place of input_land to trigger the
     expected behaviour in the function.
     """
     self.plugin = AdjustLandSeaPoints(vicinity_radius=2200.0)
     cube = squeeze(
         set_up_cube(num_grid_points=3,
                     zero_point_indices=((0, 0, 1, 1), )))
     self.plugin.input_land = cube.copy()
     self.plugin.output_land = cube.copy()
     self.plugin.nearest_cube = cube.copy()
     self.plugin.nearest_cube.data[0, 1] = 0.5
     self.plugin.output_cube = self.plugin.nearest_cube.copy()
     self.move_sea_point = squeeze(
         set_up_cube(num_grid_points=3,
                     zero_point_indices=((0, 0, 0, 1), )))
Example #13
 def load_single_file_callback(cube, field, filename):
     from iris.util import squeeze
     from datetime import datetime
     from iris.coords import AuxCoord
     # Use a separate name so the datetime class itself is not shadowed.
     granule_time = datetime(cube.attributes['HDFEOS_ADDITIONAL_FILE_ATTRIBUTES.GranuleYear'],
                             cube.attributes['HDFEOS_ADDITIONAL_FILE_ATTRIBUTES.GranuleMonth'],
                             cube.attributes['HDFEOS_ADDITIONAL_FILE_ATTRIBUTES.GranuleDay'],
                             12, 0, 0)
     cube.add_aux_coord(AuxCoord([granule_time], standard_name='time'))
     cube.units = 'dobson'
     cube.convert_units()
     # Sometimes it's useful to remove length one dimensions from cubes, squeeze does this for us...
     return squeeze(cube)
Example #14
 def setUp(self):
     """
     Set up a basic 2D cube with a large enough grid to see the
     effect of the fuzzy weights.
     """
     thresholds = [10]
     data = np.ones((1, 7, 7), dtype=np.float32)
     self.cube = set_up_probability_cube(
         data,
         thresholds,
         spatial_grid="equalarea",
         time=datetime(2017, 11, 10, 4, 0),
         frt=datetime(2017, 11, 10, 0, 0),
     )
     self.cube = squeeze(self.cube)
Example #15
 def setUp(self):
     """Set up a cube with 2 thresholds to test normalisation. We are
     testing normalising along the leading dimension in this cube."""
     thresholds = [10, 20]
     data = np.ones((2, 2, 3), dtype=np.float32)
     cycle1 = set_up_probability_cube(
         data,
         thresholds,
         spatial_grid="equalarea",
         time=datetime(2017, 11, 10, 4, 0),
         frt=datetime(2017, 11, 10, 0, 0),
     )
     cycle2 = set_up_probability_cube(
         data,
         thresholds,
         spatial_grid="equalarea",
         time=datetime(2017, 11, 10, 4, 0),
         frt=datetime(2017, 11, 10, 1, 0),
     )
     cycle3 = set_up_probability_cube(
         data,
         thresholds,
         spatial_grid="equalarea",
         time=datetime(2017, 11, 10, 4, 0),
         frt=datetime(2017, 11, 10, 2, 0),
     )
     self.spatial_weights_cube = CubeList([cycle1, cycle2,
                                           cycle3]).merge_cube()
     self.spatial_weights_cube = squeeze(self.spatial_weights_cube)
     self.spatial_weights_cube.rename("weights")
     # This input array has 3 forecast reference times and 2 thresholds.
     # The two thresholds have the same weights.
     self.spatial_weights_cube.data = np.array(
         [
             [[[0.2, 0, 0.2], [0.2, 0, 0.2]], [[0.2, 0, 0.2], [0.2, 0, 0.2]]
              ],
             [[[0, 0, 0.5], [0, 0, 0.5]], [[0, 0, 0.5], [0, 0, 0.5]]],
             [
                 [[0.3, 0.3, 0.3], [0.3, 0.3, 0.3]],
                 [[0.3, 0.3, 0.3], [0.3, 0.3, 0.3]],
             ],
         ],
         dtype=np.float32,
     )
     self.plugin = SpatiallyVaryingWeightsFromMask()
Example #16
 def setUp(self):
     """
      Set up a basic input cube. Input cube has 2 thresholds and 3
     forecast_reference_times
     """
     thresholds = [10, 20]
     data = np.ones((2, 2, 3), dtype=np.float32)
     cycle1 = set_up_probability_cube(
         data,
         thresholds,
         spatial_grid="equalarea",
         time=datetime(2017, 11, 10, 4, 0),
         frt=datetime(2017, 11, 10, 0, 0),
     )
     cycle2 = set_up_probability_cube(
         data,
         thresholds,
         spatial_grid="equalarea",
         time=datetime(2017, 11, 10, 4, 0),
         frt=datetime(2017, 11, 10, 1, 0),
     )
     cycle3 = set_up_probability_cube(
         data,
         thresholds,
         spatial_grid="equalarea",
         time=datetime(2017, 11, 10, 4, 0),
         frt=datetime(2017, 11, 10, 2, 0),
     )
     self.cube_to_collapse = CubeList([cycle1, cycle2, cycle3]).merge_cube()
     self.cube_to_collapse = squeeze(self.cube_to_collapse)
     self.cube_to_collapse.rename("weights")
     # This input array has 3 forecast reference times and 2 thresholds.
     # The two thresholds have the same weights.
     self.cube_to_collapse.data = np.array(
         [
             [[[1, 0, 1], [1, 1, 1]], [[1, 0, 1], [1, 1, 1]]],
             [[[0, 0, 1], [0, 1, 1]], [[0, 0, 1], [0, 1, 1]]],
             [[[1, 1, 1], [1, 1, 1]], [[1, 1, 1], [1, 1, 1]]],
         ],
         dtype=np.float32,
     )
     self.cube_to_collapse.data = np.ma.masked_equal(
         self.cube_to_collapse.data, 0)
     self.plugin = SpatiallyVaryingWeightsFromMask(
         "forecast_reference_time")
Example #17
 def test_ice_large_with_fc(self):
     """Test that large VII probs do increase zero lightning risk when
     forecast lead time is non-zero (three forecast_period points)"""
     self.ice_cube.data[:, 1, 1] = 1.0
     self.fg_cube.data[1, 1] = 0.0
     frt_point = self.fg_cube.coord("forecast_reference_time").points[0]
     fg_cube_input = CubeList([])
     for fc_time in np.array([1, 2.5, 3]) * 3600:  # seconds
         fg_cube_next = self.fg_cube.copy()
         fg_cube_next.coord("time").points = [frt_point + fc_time]
         fg_cube_next.coord("forecast_period").points = [fc_time]
         fg_cube_input.append(squeeze(fg_cube_next))
     fg_cube_input = fg_cube_input.merge_cube()
     expected = fg_cube_input.copy()
     # expected.data contains all ones except:
     expected.data[0, 1, 1] = 0.54
     expected.data[1, 1, 1] = 0.0
     expected.data[2, 1, 1] = 0.0
     result = self.plugin.apply_ice(fg_cube_input, self.ice_cube)
     self.assertArrayAlmostEqual(result.data, expected.data)
Example #18
 def setUp(self):
     """Create cubes with a single zero prob(precip) point.
     The cubes look like this:
     precipitation_amount / (kg m^-2)
     Dimension coordinates:
         time: 1;
         projection_y_coordinate: 3;
         projection_x_coordinate: 3;
     Auxiliary coordinates:
         forecast_period (on time coord): 0.0 hours (simulates nowcast data)
     Scalar coordinates:
         forecast_reference_time: 2015-11-23 03:00:00
     Data:
     self.fg_cube:
         All points contain float(1.)
         Cube name is "probability_of_lightning".
     self.ice_cube:
         With extra coordinate of length(3) "threshold" containing
         points [0.5, 1., 2.] kg m^-2.
          Time and forecast_period dimensions "squeezed" to be Scalar coords.
         All points contain float(0.)
         Cube name is "probability_of_vertical_integral_of_ice".
     """
     self.fg_cube = add_forecast_reference_time_and_forecast_period(
         set_up_cube_with_no_realizations(zero_point_indices=[],
                                          num_grid_points=3),
         fp_point=0.0)
     self.fg_cube.rename("probability_of_lightning")
     self.ice_cube = squeeze(
         add_forecast_reference_time_and_forecast_period(set_up_cube(
             num_realization_points=3,
             zero_point_indices=[],
             num_grid_points=3),
                                                         fp_point=0.0))
     threshold_coord = self.ice_cube.coord('realization')
     threshold_coord.points = [0.5, 1.0, 2.0]
     threshold_coord.rename('threshold')
     threshold_coord.units = cf_units.Unit('kg m^-2')
     self.ice_cube.data = np.zeros_like(self.ice_cube.data)
     self.ice_cube.rename("probability_of_vertical_integral_of_ice")
     self.plugin = Plugin()
Example #19
    def load_single_file_callback(cube, field, filename):
        from iris.util import squeeze
        # We need to remove the history attribute so that cubes read from multiple files can be properly merged
        cube.attributes.pop('history')

        # Fix the altitude coordinate
        alt_coord = cube.coords(name_or_coord='hybrid_ht_1')
        if alt_coord:
            alt_coord[0].rename('altitude')
            alt_coord[0].attributes = {}
            alt_coord[0].units = Unit('m')

        # Remove the scalar time coord
        if len(cube.coord('t').points) == 1:
            cube.remove_coord(cube.coord(name_or_coord='t'))
        else:
            cube.coord(name_or_coord='t').rename('time')

        # We also need to remove the length one time dimension so that the cube can be merged correctly (iris preserves
        #  the value as a scalar which then gets converted back into a full coordinate again on merge).
        return squeeze(cube)
Example #20
def correct_analyses(cubes):
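    # NOTE: `lat`, `lon`, `name_pairs` and `units` used below are
    # module-level definitions in the original source, not shown here.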
    newcubes = CubeList()

    for cube in cubes:
        # Squeeze cubes dimensions
        newcube = squeeze(cube)

        # Give time coordinate proper name
        newcube.coord('t').rename('time')

        # Correct dimensional coordinates
        z, y, x, t = newcube.coords()

        z.rename('level_height')
        z.units = 'm'
        z.attributes = {'positive': 'up'}

        y.rename('latitude')
        y.coord_system = lat.coord_system
        y.units = lat.units

        x.rename('longitude')
        x.coord_system = lon.coord_system
        x.units = lon.units

        newcubes.append(newcube)

    # Correct cube names
    for before, after in name_pairs:
        newcubes.extract(before)[0].rename(after)

    # Correct units
    for name, unit in units:
        newcubes.extract(name)[0].units = unit

    return newcubes
Example #21
 def test_missing_dim(self):
     """Check that an error is raised if missing dimensional coordinate"""
     single_percentile = squeeze(self.percentiles_land[0])
     message = "Input cubes do not have the same dimension coordinates"
     with self.assertRaisesRegex(ValueError, message):
         merge_land_and_sea(single_percentile, self.percentiles_sea)
Example #22
 def setUp(self):
     """Create cubes with a single zero prob(precip) point.
     The cubes look like this:
     precipitation_amount / (kg m^-2)
     Dimension coordinates:
         time: 1;
         projection_y_coordinate: 3;
         projection_x_coordinate: 3;
     Auxiliary coordinates:
         forecast_period (on time coord): 4.0 hours (simulates UM data)
     Scalar coordinates:
         forecast_reference_time: 2015-11-23 03:00:00
     Data:
     self.cube:
         Describes the nowcast fields to be calculated.
         forecast_period (on time coord): 0.0 hours (simulates nowcast data)
         All points contain float(1.) except the
         zero point [0, 1, 1] which is float(0.)
     self.fg_cube:
         All points contain float(1.)
     self.ltng_cube:
         forecast_period (on time coord): 0.0 hours (simulates nowcast data)
         All points contain float(1.)
     self.precip_cube:
         With extra coordinate of length(3) "threshold" containing
         points [0.5, 7., 35.] mm hr-1.
         All points contain float(1.) except the
         zero point [0, 0, 1, 1] which is float(0.)
         and [1:, 0, ...] which are float(0.)
     self.vii_cube:
         With extra coordinate of length(3) "threshold" containing
         points [0.5, 1., 2.] kg m^-2.
         forecast_period (on time coord): 0.0 hours (simulates nowcast data)
          Time and forecast_period dimensions "squeezed" to be Scalar coords.
         All points contain float(0.)
     """
     self.cube = add_forecast_reference_time_and_forecast_period(
         set_up_cube_with_no_realizations(zero_point_indices=((0, 1, 1), ),
                                          num_grid_points=3),
         fp_point=0.0)
     self.fg_cube = add_forecast_reference_time_and_forecast_period(
         set_up_cube_with_no_realizations(zero_point_indices=[],
                                          num_grid_points=3))
     self.ltng_cube = add_forecast_reference_time_and_forecast_period(
         set_up_cube_with_no_realizations(zero_point_indices=[],
                                          num_grid_points=3),
         fp_point=0.0)
     self.precip_cube = (add_forecast_reference_time_and_forecast_period(
         set_up_cube(num_realization_points=3,
                     zero_point_indices=((0, 1, 1), ),
                     num_grid_points=3),
         fp_point=0.0))
     threshold_coord = self.precip_cube.coord('realization')
     threshold_coord.points = [0.5, 7.0, 35.0]
     threshold_coord.rename('threshold')
     threshold_coord.units = cf_units.Unit('mm hr-1')
     self.precip_cube.data[1:, 0, ...] = 0.
      # iris.util.squeeze is applied here to demote the length-one coord "time"
     # to a scalar coord.
     self.vii_cube = squeeze(
         add_forecast_reference_time_and_forecast_period(set_up_cube(
             num_realization_points=3,
             zero_point_indices=[],
             num_grid_points=3),
                                                         fp_point=0.0))
     threshold_coord = self.vii_cube.coord('realization')
     threshold_coord.points = [0.5, 1.0, 2.0]
     threshold_coord.rename('threshold')
     threshold_coord.units = cf_units.Unit('kg m^-2')
     self.vii_cube.data = np.zeros_like(self.vii_cube.data)
     self.plugin = Plugin()
Example #23
def set_up_lightning_test_cubes(validity_time=dt(2015, 11, 23, 7),
                                fg_frt=dt(2015, 11, 23, 3),
                                grid_points=3):
    """Set up five cubes for testing nowcast lightning.

    The cube coordinates look like this:
        Dimension coordinates:
            projection_y_coordinate: grid_points;
            projection_x_coordinate: grid_points;
        Scalar coordinates:
            time: 2015-11-23 07:00:00
            forecast_reference_time: 2015-11-23 07:00:00
            forecast_period: 0 seconds

    Args:
        grid_points (int):
            Number of points along each spatial axis (square grid)
        validity_time (datetime.datetime):
            Time to use for test cubes
        fg_frt (datetime.datetime):
            Forecast reference time for first_guess_cube, which needs
            to have different forecast periods for different tests

    Returns:
        template_cube (iris.cube.Cube)
        first_guess_cube (iris.cube.Cube)
        lightning_rate_cube (iris.cube.Cube)
        prob_precip_cube (iris.cube.Cube):
            Has extra coordinate of length(3) "threshold" containing
            points [0.5, 7., 35.] mm h-1
        prob_vii_cube (iris.cube.Cube):
            Has extra coordinate of length(3) "threshold" containing
            points [0.5, 1., 2.] kg m-2
    """
    # template cube with metadata matching desired output
    data = np.ones((grid_points, grid_points), dtype=np.float32)
    template_cube = set_up_variable_cube(
        data.copy(),
        name="metadata_template",
        units=None,
        time=validity_time,
        frt=validity_time,
        spatial_grid="equalarea",
    )

    # first guess lightning rate probability cube with flexible forecast
    # period (required for level 2 lightning risk index)
    prob_fg = np.array([data.copy()], dtype=np.float32)
    first_guess_cube = set_up_probability_cube(
        prob_fg,
        np.array([0], dtype=np.float32),
        threshold_units="s-1",
        variable_name="rate_of_lightning",
        time=validity_time,
        frt=fg_frt,
        spatial_grid="equalarea",
    )
    first_guess_cube = squeeze(first_guess_cube)

    # lightning rate cube full of ones
    lightning_rate_cube = set_up_variable_cube(
        data.copy(),
        name="rate_of_lightning",
        units="min-1",
        time=validity_time,
        frt=validity_time,
        spatial_grid="equalarea",
    )

    # probability of precip rate exceedance cube with higher rate probabilities
    # set to zero, and central point of low rate probabilities set to zero
    precip_data = np.ones((3, grid_points, grid_points), dtype=np.float32)
    precip_thresholds = np.array([0.5, 7.0, 35.0], dtype=np.float32)
    prob_precip_cube = set_up_probability_cube(
        precip_data,
        precip_thresholds,
        variable_name="lwe_precipitation_rate",
        threshold_units="mm h-1",
        time=validity_time,
        frt=validity_time,
        spatial_grid="equalarea",
    )
    prob_precip_cube.data[0, 1, 1] = 0.0
    prob_precip_cube.data[1:, ...] = 0.0

    # probability of VII exceedance cube full of zeros
    vii_data = np.zeros((3, grid_points, grid_points), dtype=np.float32)
    vii_thresholds = np.array([0.5, 1.0, 2.0], dtype=np.float32)
    prob_vii_cube = set_up_probability_cube(
        vii_data,
        vii_thresholds,
        variable_name="vertical_integral_of_ice",
        threshold_units="kg m-2",
        time=validity_time,
        frt=validity_time,
        spatial_grid="equalarea",
    )

    return (
        template_cube,
        first_guess_cube,
        lightning_rate_cube,
        prob_precip_cube,
        prob_vii_cube,
    )
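A hypothetical call, based only on the signature and docstring above:

(template_cube, first_guess_cube, lightning_rate_cube,
 prob_precip_cube, prob_vii_cube) = set_up_lightning_test_cubes(grid_points=3)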
Example #24
def generate_metadata(
    name="air_pressure_at_sea_level",
    units=None,
    time_period=None,
    ensemble_members=8,
    leading_dimension=None,
    cube_type="variable",
    spp__relative_to_threshold="greater_than",
    npoints=71,
    **kwargs,
):
    """ Generate a cube with metadata only.

    Args:
        name (str):
            Output variable name, or if creating a probability cube the name of the
            underlying variable to which the probability field applies.
        units (Optional[str]):
            Output variable units, or if creating a probability cube the units of the
            underlying variable / threshold.
        time_period (Optional[int]):
            The period in minutes between the time bounds. This is used to calculate
            the lower time bound. If unset the diagnostic will be instantaneous, i.e.
            without time bounds.
        ensemble_members (Optional[int]):
            Number of ensemble members. Default 8, unless percentile or probability set
            to True.
        leading_dimension (Optional[List[float]]):
            List of realizations, percentiles or thresholds.
        cube_type (Optional[str]):
            The type of cube to be generated. Permitted values are "variable",
            "percentile" or "probability".
        spp__relative_to_threshold (Optional[str]):
            Value of the attribute "spp__relative_to_threshold" which is required for
            IMPROVER probability cubes.
        npoints (Optional[int]):
            Number of points along each of the y and x spatial axes.
        **kwargs:
            Additional keyword arguments to pass to the required cube setup function.

    Returns:
        iris.cube.Cube:
            Output of set_up_variable_cube(), set_up_percentile_cube() or
            set_up_probability_cube()
    """
    if cube_type not in CUBE_TYPES:
        raise ValueError(
            'Cube type {} not supported. Specify one of "variable", "percentile" or "probability".'
            .format(cube_type))

    if "spatial_grid" in kwargs and kwargs["spatial_grid"] not in (
            "latlon",
            "equalarea",
    ):
        raise ValueError(
            "Spatial grid {} not supported. Specify either latlon or equalarea."
            .format(kwargs["spatial_grid"]))

    if ("domain_corner" in kwargs and kwargs["domain_corner"] is not None
            and len(kwargs["domain_corner"]) != 2):
        raise ValueError("Domain corner must be a list or tuple of length 2.")

    if units is None:
        units = _get_units(name)

    # If time_period specified, create time bounds using time as upper bound
    if time_period is not None:
        if "time" not in kwargs:
            kwargs["time"] = DEFAULT_TIME

        time_bounds = _create_time_bounds(kwargs["time"], time_period)
        kwargs["time_bounds"] = time_bounds

    # If grid_spacing not specified, use default for requested spatial grid
    if "grid_spacing" not in kwargs or kwargs["grid_spacing"] is None:
        if "spatial_grid" not in kwargs:
            kwargs["spatial_grid"] = DEFAULT_SPATIAL_GRID

        kwargs["grid_spacing"] = DEFAULT_GRID_SPACING[kwargs["spatial_grid"]]

    # Create ndimensional array of zeros
    if "height_levels" not in kwargs:
        kwargs["height_levels"] = None

    data = _create_data_array(ensemble_members, leading_dimension, npoints,
                              kwargs["height_levels"])

    # Set up requested cube
    if cube_type == "percentile":
        metadata_cube = set_up_percentile_cube(
            data,
            percentiles=leading_dimension,
            name=name,
            units=units,
            **kwargs,
        )
    elif cube_type == "probability":
        metadata_cube = set_up_probability_cube(
            data,
            leading_dimension,
            variable_name=name,
            threshold_units=units,
            spp__relative_to_threshold=spp__relative_to_threshold,
            **kwargs,
        )
    else:
        metadata_cube = set_up_variable_cube(
            data,
            name=name,
            units=units,
            realizations=leading_dimension,
            **kwargs,
        )

    metadata_cube = squeeze(metadata_cube)

    return metadata_cube
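A hypothetical invocation sketch, using only parameters documented in the signature above:

variable_cube = generate_metadata(name="air_temperature", units="K")
probability_cube = generate_metadata(
    name="air_temperature",
    units="K",
    cube_type="probability",
    leading_dimension=[272.15, 273.15, 274.15],
)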
Example #25
 def setUp(self):
     """Create cubes with a single zero prob(precip) point.
     The cubes look like this:
     precipitation_amount / (kg m^-2)
     Dimension coordinates:
         time: 1;
         projection_y_coordinate: 16;
         projection_x_coordinate: 16;
     Auxiliary coordinates:
         forecast_period (on time coord): 4.0 hours (simulates UM data)
     Scalar coordinates:
         forecast_reference_time: 2015-11-23 03:00:00
     Data:
     self.fg_cube:
         All points contain float(1.)
         Cube name is "probability_of_lightning".
     self.ltng_cube:
         forecast_period (on time coord): 0.0 hours (simulates nowcast data)
         All points contain float(1.)
         Cube name is "rate_of_lightning".
         Cube units are "min^-1".
     self.precip_cube:
         With extra coordinate of length(3) "threshold" containing
         points [0.5, 7., 35.] mm hr-1.
         All points contain float(1.) except the
         zero point [0, 0, 7, 7] which is float(0.)
         and [1:, 0, ...] which are float(0.)
         Cube name is "probability_of_precipitation".
         Cube has added attribute {'relative_to_threshold': 'above'}
     self.vii_cube:
         forecast_period (on time coord): 0.0 hours (simulates nowcast data)
         With extra coordinate of length(3) "threshold" containing
         points [0.5, 1., 2.] kg m^-2.
          Time and forecast_period dimensions "squeezed" to be Scalar coords.
         All points contain float(0.)
         Cube name is "probability_of_vertical_integral_of_ice".
     """
     self.fg_cube = add_forecast_reference_time_and_forecast_period(
         set_up_cube_with_no_realizations(zero_point_indices=[]))
     self.fg_cube.rename("probability_of_lightning")
     self.ltng_cube = add_forecast_reference_time_and_forecast_period(
         set_up_cube_with_no_realizations(zero_point_indices=[]),
         fp_point=0.0)
     self.ltng_cube.rename("rate_of_lightning")
     self.ltng_cube.units = cf_units.Unit("min^-1")
     self.precip_cube = (add_forecast_reference_time_and_forecast_period(
         set_up_cube(num_realization_points=3)))
     threshold_coord = self.precip_cube.coord('realization')
     threshold_coord.points = [0.5, 7.0, 35.0]
     threshold_coord.rename('threshold')
     threshold_coord.units = cf_units.Unit('mm hr-1')
     self.precip_cube.rename("probability_of_precipitation")
     self.precip_cube.attributes.update({'relative_to_threshold': 'above'})
     self.precip_cube.data[1:, 0, ...] = 0.
     self.vii_cube = squeeze(
         add_forecast_reference_time_and_forecast_period(set_up_cube(
             num_realization_points=3, zero_point_indices=[]),
                                                         fp_point=0.0))
     threshold_coord = self.vii_cube.coord('realization')
     threshold_coord.points = [0.5, 1.0, 2.0]
     threshold_coord.rename('threshold')
     threshold_coord.units = cf_units.Unit('kg m^-2')
     self.vii_cube.data = np.zeros_like(self.vii_cube.data)
     self.vii_cube.rename("probability_of_vertical_integral_of_ice")
     self.plugin = Plugin()
Example #26
def load(uris, constraints=None, callback=None):
    cubes = iris.load(uris, constraints=constraints, callback=callback)
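    # fix_by_stash and get_stash are helpers from the surrounding module
    # (not shown in this snippet).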
    fix_by_stash(cubes)
    cubes.sort(key=get_stash)

    return iris.cube.CubeList([squeeze(cube) for cube in cubes])
Example #27
 def load_single_file_callback(cube, field, filename):
     from iris.util import squeeze
     # Sometimes it's useful to remove length one dimensions from cubes, squeeze does this for us...
     return squeeze(cube)