Example No. 1
    def test_basic(self):
        """Test times can be set"""
        coord_dims = construct_scalar_time_coords(datetime(2017, 12, 1, 14, 0),
                                                  None,
                                                  datetime(2017, 12, 1, 9, 0))
        time_coords = [item[0] for item in coord_dims]

        for crd in time_coords:
            self.assertIsInstance(crd, iris.coords.DimCoord)

        self.assertEqual(time_coords[0].name(), "time")
        self.assertEqual(
            iris_time_to_datetime(time_coords[0])[0],
            datetime(2017, 12, 1, 14, 0))
        self.assertEqual(time_coords[1].name(), "forecast_reference_time")
        self.assertEqual(
            iris_time_to_datetime(time_coords[1])[0],
            datetime(2017, 12, 1, 9, 0))
        self.assertEqual(time_coords[2].name(), "forecast_period")
        self.assertEqual(time_coords[2].points[0], 3600 * 5)

        for crd in time_coords[:2]:
            self.assertEqual(crd.dtype, np.int64)
            self.assertEqual(crd.units, "seconds since 1970-01-01 00:00:00")
        self.assertEqual(time_coords[2].units, "seconds")
        self.assertEqual(time_coords[2].dtype, np.int32)
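
A minimal sketch of the coordinate conventions this test asserts (int64 seconds since the epoch for time, int32 seconds for forecast_period), built with iris directly rather than improver's construct_scalar_time_coords; the epoch value is a hand-computed assumption:

import numpy as np
import iris.coords

time_coord = iris.coords.DimCoord(
    np.array([1512136800], dtype=np.int64),  # 2017-12-01 14:00 UTC
    standard_name="time", units="seconds since 1970-01-01 00:00:00")
fp_coord = iris.coords.DimCoord(
    np.array([3600 * 5], dtype=np.int32),
    standard_name="forecast_period", units="seconds")
print(time_coord.dtype, fp_coord.dtype)  # int64 int32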
Example No. 2
    def test_defaults(self):
        """Test default arguments produce cube with expected dimensions
        and metadata"""
        result = set_up_variable_cube(self.data)

        # check type, data and attributes
        self.assertIsInstance(result, iris.cube.Cube)
        self.assertEqual(result.standard_name, "air_temperature")
        self.assertEqual(result.name(), "air_temperature")
        self.assertEqual(result.units, "K")
        self.assertArrayAlmostEqual(result.data, self.data)
        self.assertEqual(result.attributes, {})

        # check dimension coordinates
        self.assertEqual(result.coord_dims("latitude"), (0, ))
        self.assertEqual(result.coord_dims("longitude"), (1, ))

        # check scalar time coordinates
        for time_coord in ["time", "forecast_reference_time"]:
            self.assertEqual(result.coord(time_coord).dtype, np.int64)
        self.assertEqual(result.coord("forecast_period").dtype, np.int32)

        expected_time = datetime(2017, 11, 10, 4, 0)
        time_point = iris_time_to_datetime(result.coord("time"))[0]
        self.assertEqual(time_point, expected_time)

        expected_frt = datetime(2017, 11, 10, 0, 0)
        frt_point = iris_time_to_datetime(
            result.coord("forecast_reference_time"))[0]
        self.assertEqual(frt_point, expected_frt)

        self.assertEqual(result.coord("forecast_period").units, "seconds")
        self.assertEqual(result.coord("forecast_period").points[0], 14400)

        check_mandatory_standards(result)
Example No. 3
def filter_non_matching_cubes(historic_forecast, truth):
    """
    Provide filtering for the historic forecast and truth to make sure
    that these contain matching validity times. This ensures that any
    mismatch between the historic forecasts and truth is dealt with.

    Args:
        historic_forecast (iris.cube.Cube):
            Cube of historic forecasts that potentially contains
            a mismatch compared to the truth.
        truth (iris.cube.Cube):
            Cube of truth that potentially contains a mismatch
            compared to the historic forecasts.

    Returns:
        (tuple): tuple containing:
            **matching_historic_forecasts** (iris.cube.Cube):
                Cube of historic forecasts where any mismatches with
                the truth cube have been removed.
            **matching_truths** (iris.cube.Cube):
                Cube of truths where any mismatches with
                the historic_forecasts cube have been removed.

    Raises:
        ValueError: The filtering has found no matches in validity time
            between the historic forecasts and the truths.

    """
    matching_historic_forecasts = iris.cube.CubeList([])
    matching_truths = iris.cube.CubeList([])
    for hf_slice in historic_forecast.slices_over("time"):
        if hf_slice.coord("time").has_bounds():
            point = iris_time_to_datetime(hf_slice.coord("time"),
                                          point_or_bound="point")
            (bounds, ) = iris_time_to_datetime(hf_slice.coord("time"),
                                               point_or_bound="bound")
            coord_values = {
                "time": lambda cell: (
                    point[0] == cell.point
                    and bounds[0] == cell.bound[0]
                    and bounds[1] == cell.bound[1])
            }
        else:
            coord_values = {
                "time":
                iris_time_to_datetime(hf_slice.coord("time"),
                                      point_or_bound="point")
            }

        constr = iris.Constraint(coord_values=coord_values)
        truth_slice = truth.extract(constr)

        if truth_slice:
            matching_historic_forecasts.append(hf_slice)
            matching_truths.append(truth_slice)
    if not matching_historic_forecasts and not matching_truths:
        msg = ("The filtering has found no matches in validity time "
               "between the historic forecasts and the truths.")
        raise ValueError(msg)
    return (matching_historic_forecasts.merge_cube(),
            matching_truths.merge_cube())
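
To make the bounds-matching constraint above concrete, here is a plain-Python sketch of the predicate it builds, with hypothetical point and bounds values (no iris required):

from datetime import datetime

point = [datetime(2017, 12, 1, 14, 0)]
bounds = (datetime(2017, 12, 1, 13, 0), datetime(2017, 12, 1, 14, 0))

def matches(cell_point, cell_bound):
    # Mirrors the lambda passed to iris.Constraint above.
    return (point[0] == cell_point
            and bounds[0] == cell_bound[0]
            and bounds[1] == cell_bound[1])

print(matches(datetime(2017, 12, 1, 14, 0), bounds))  # True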
Example No. 4
def test_default():
    """ Tests default metadata cube generated """
    cube = generate_metadata()

    assert cube.name() == NAME_DEFAULT
    assert cube.standard_name == NAME_DEFAULT
    assert cube.units == UNITS_DEFAULT

    assert cube.ndim == NDIMS_DEFAULT
    assert cube.shape == (ENSEMBLE_MEMBERS_DEFAULT, NPOINTS_DEFAULT, NPOINTS_DEFAULT)

    spatial_grid_values = SPATIAL_GRID_ATTRIBUTE_DEFAULTS[SPATIAL_GRID_DEFAULT]
    assert cube.coords()[0].name() == "realization"
    assert cube.coords()[1].name() == spatial_grid_values["y"]
    assert cube.coords()[2].name() == spatial_grid_values["x"]

    for axis in ("y", "x"):
        assert cube.coord(axis=axis).units == spatial_grid_values["units"]
        assert cube.coord(axis=axis).coord_system == spatial_grid_values["coord_system"]
        assert np.diff(cube.coord(axis=axis).points)[0] == pytest.approx(
            spatial_grid_values["grid_spacing"]
        )

    assert np.count_nonzero(cube.data) == 0

    assert iris_time_to_datetime(cube.coord("time"))[0] == TIME_DEFAULT
    assert cube.coord("time").bounds is None
    assert (
        iris_time_to_datetime(cube.coord("forecast_reference_time"))[0] == FRT_DEFAULT
    )
    assert cube.coord("forecast_period").points == FORECAST_PERIOD_DEFAULT

    assert cube.attributes == ATTRIBUTES_DEFAULT
Example No. 5
 def test_input_cube_unmodified(self):
     """Test that an input cube with unexpected coordinate units is not
     modified"""
     self.cube.coord("time").convert_units("hours since 1970-01-01 00:00:00")
     self.cube.coord("time").points = self.cube.coord("time").points.astype(np.int64)
     reference_coord = self.cube.coord("time").copy()
     iris_time_to_datetime(self.cube.coord("time"))
     self.assertArrayEqual(self.cube.coord("time").points, reference_coord.points)
     self.assertArrayEqual(self.cube.coord("time").units, reference_coord.units)
     self.assertEqual(self.cube.coord("time").dtype, np.int64)
Example No. 6
 def test_time_points(self):
     """Test ability to configure time and forecast reference time"""
     expected_time = datetime(2018, 3, 1, 12, 0)
     expected_frt = datetime(2018, 3, 1, 9, 0)
     result = set_up_variable_cube(self.data, time=expected_time,
                                   frt=expected_frt)
     time_point = iris_time_to_datetime(result.coord("time"))[0]
     self.assertEqual(time_point, expected_time)
     frt_point = iris_time_to_datetime(
         result.coord("forecast_reference_time"))[0]
     self.assertEqual(frt_point, expected_frt)
     self.assertEqual(result.coord("forecast_period").points[0], 10800)
     self.assertFalse(result.coords('time', dim_coords=True))
Example No. 7
    def _select_orographic_enhancement_cube(precip_cube,
                                            oe_cube,
                                            allowed_time_diff=1800):
        """Select the orographic enhancement cube with the required time
        coordinate.

        Args:
            precip_cube (iris.cube.Cube):
                Cube containing the input precipitation fields.
            oe_cube (iris.cube.Cube):
                Cube containing orographic enhancement fields at one or
                more times.
            allowed_time_diff (int):
                The maximum permitted difference, in integer seconds,
                between the datetime of the precipitation cube and the time
                points available within the orographic enhancement cube.
                If this limit is exceeded, then an error is raised.


        Returns:
            iris.cube.Cube:
                Cube containing the orographic enhancement field at the
                required time.

        """
        (time_point, ) = iris_time_to_datetime(
            precip_cube.coord("time").copy())
        oe_cube_slice = extract_nearest_time_point(
            oe_cube, time_point, allowed_dt_difference=allowed_time_diff)
        return oe_cube_slice
Example No. 8
    def _select_orographic_enhancement_cube(
            precip_cube: Cube,
            oe_cube: Cube,
            allowed_time_diff: int = 1800) -> Cube:
        """Select the orographic enhancement cube with the required time
        coordinate.

        Args:
            precip_cube:
                Cube containing the input precipitation fields.
            oe_cube:
                Cube containing orographic enhancement fields at one or
                more times.
            allowed_time_diff:
                The maximum permitted difference, in integer seconds,
                between the datetime of the precipitation cube and the time
                points available within the orographic enhancement cube.
                If this limit is exceeded, then an error is raised.


        Returns:
            Cube containing the orographic enhancement field at the
            required time.

        Raises:
            ValueError: If the required time step is not available within the
                tolerance (in theory. In practice, the tolerance is left as
                the default None, which matches ANY available field regardless
                of time offset, so this error is never raised.)
        """
        (time_point, ) = iris_time_to_datetime(
            precip_cube.coord("time").copy())
        oe_cube_slice = extract_nearest_time_point(
            oe_cube, time_point, allowed_dt_difference=allowed_time_diff)
        return oe_cube_slice
Example No. 9
    def _set_up_output_cubes(self, all_forecasts: ndarray) -> CubeList:
        """
        Convert 3D numpy array into list of cubes with correct time metadata.
        All other metadata are inherited from self.analysis_cube.

        Args:
            all_forecasts:
                Array of 2D forecast fields returned by extrapolation function

        Returns:
            List of extrapolated cubes with correct time coordinates
        """
        current_datetime = iris_time_to_datetime(
            self.analysis_cube.coord("time"))[0]
        forecast_cubes = [self.analysis_cube.copy()]
        for i in range(len(all_forecasts)):
            # copy forecast data into template cube
            new_cube = self.analysis_cube.copy(
                data=all_forecasts[i, :, :].astype(np.float32))
            # update time and forecast period coordinates
            current_datetime += timedelta(seconds=self.interval * 60)
            current_time = datetime_to_iris_time(current_datetime)
            new_cube.coord("time").points = np.array([current_time],
                                                     dtype=np.int64)
            new_cube.coord("forecast_period").points = np.array(
                [(i + 1) * self.interval * 60], dtype=np.int32)
            forecast_cubes.append(new_cube)
        return forecast_cubes
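
A standalone sketch of the epoch-seconds conversion that datetime_to_iris_time is assumed to perform here (integer seconds since 1970-01-01, treating naive datetimes as UTC, to match the int64 time points above):

from datetime import datetime, timezone

import numpy as np

def to_epoch_seconds(t):
    # Assumption: t is a naive datetime interpreted as UTC.
    return np.int64(t.replace(tzinfo=timezone.utc).timestamp())

print(to_epoch_seconds(datetime(2017, 2, 17, 6, 0)))  # 1487311200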
Example No. 10
    def _select_orographic_enhancement_cube(precip_cube,
                                            oe_cubes,
                                            allowed_time_diff=1800):
        """Select the orographic enhancement cube with the required time
        coordinate.

        Args:
            precip_cube (iris.cube.Cube):
                Cube containing the input precipitation fields.
            oe_cubes (iris.cube.Cube or iris.cube.CubeList):
                Cube or CubeList containing the orographic enhancement fields.
            allowed_time_diff (int):
                The maximum permitted difference, in integer seconds,
                between the datetime of the precipitation cube and the time
                points available within the orographic enhancement cube.
                If this limit is exceeded, then an error is raised.


        Returns:
            iris.cube.Cube:
                Cube containing the orographic enhancement fields at the
                required time.

        """
        time_point, = iris_time_to_datetime(precip_cube.coord("time").copy())
        oe_cube = extract_nearest_time_point(
            oe_cubes, time_point, allowed_dt_difference=allowed_time_diff)
        return oe_cube
Example No. 11
 def test_basic(self):
     """Test iris_time_to_datetime returns list of datetime """
     result = iris_time_to_datetime(self.cube.coord("time"))
     self.assertIsInstance(result, list)
     for item in result:
         self.assertIsInstance(item, datetime)
     self.assertEqual(result[0], datetime(2017, 2, 17, 6, 0))
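
For reference, a minimal sketch of the conversion this test exercises, using cf_units directly rather than improver's wrapper (note: depending on the cf_units version, num2date may return a cftime datetime rather than a datetime.datetime):

from datetime import datetime
from cf_units import Unit

tunit = Unit("seconds since 1970-01-01 00:00:00", calendar="gregorian")
point = tunit.date2num(datetime(2017, 2, 17, 6, 0))
print(point)                  # 1487311200.0
print(tunit.num2date(point))  # 2017-02-17 06:00:00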
Example No. 12
def get_datetime_limits(time_coord, start_hour):
    """
    Determine the date limits of a time coordinate axis and return time limits
    using a provided hour on that day.

    Args:
        time_coord (iris.coords.DimCoord):
            An iris time coordinate from which to extract the date limits.

        start_hour (int):
            The hour on a 24hr clock at which to set the returned times.

    Returns:
        (tuple): tuple containing:
            **start_time** (datetime.datetime):
                First day on the time coordinate, with the time set to the
                hour given by start_hour.

            **end_time** (datetime.datetime):
                Last day on the time coordinate, with the time set to the
                hour given by start_hour.
    """
    dates = iris_time_to_datetime(time_coord)
    start_time = dt.combine(min(dates).date(), datetime.time(start_hour))
    end_time = dt.combine(max(dates).date(), datetime.time(start_hour))
    return start_time, end_time
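
The dt/datetime naming above implies "from datetime import datetime as dt" alongside "import datetime"; a runnable usage sketch with hypothetical dates:

import datetime
from datetime import datetime as dt

dates = [dt(2017, 2, 17, 6, 0), dt(2017, 2, 19, 18, 0)]
start_time = dt.combine(min(dates).date(), datetime.time(9))
end_time = dt.combine(max(dates).date(), datetime.time(9))
print(start_time, end_time)
# 2017-02-17 09:00:00 2017-02-19 09:00:00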
Example No. 13
 def test_time_bounds(self):
     """Test creation of time coordinate with bounds"""
     coord_dims = construct_scalar_time_coords(
         datetime(2017, 12, 1, 14, 0), (datetime(2017, 12, 1, 13, 0),
                                        datetime(2017, 12, 1, 14, 0)),
         datetime(2017, 12, 1, 9, 0))
     time_coord = coord_dims[0][0]
     self.assertEqual(iris_time_to_datetime(time_coord)[0],
                      datetime(2017, 12, 1, 14, 0))
     self.assertEqual(time_coord.bounds[0][0], time_coord.points[0] - 3600)
     self.assertEqual(time_coord.bounds[0][1], time_coord.points[0])
Example No. 14
 def test_time_bounds_wrong_order(self):
     """Test time bounds are correctly applied even if supplied in the wrong
     order"""
     coord_dims = construct_scalar_time_coords(
         datetime(2017, 12, 1, 14, 0), (datetime(2017, 12, 1, 14, 0),
                                        datetime(2017, 12, 1, 13, 0)),
         datetime(2017, 12, 1, 9, 0))
     time_coord = coord_dims[0][0]
     self.assertEqual(iris_time_to_datetime(time_coord)[0],
                      datetime(2017, 12, 1, 14, 0))
     self.assertEqual(time_coord.bounds[0][0], time_coord.points[0] - 3600)
     self.assertEqual(time_coord.bounds[0][1], time_coord.points[0])
Example No. 15
def test_set_time():
    """ Tests cube generated with specified time and the rest of the values set as
    default values """
    time = datetime(2020, 1, 1, 0, 0)
    cube = generate_metadata(time=time)

    assert iris_time_to_datetime(cube.coord("time"))[0] == time
    assert cube.coord("forecast_period").points > FORECAST_PERIOD_DEFAULT

    # Assert that no other values have unexpectedly changed, by resetting the
    # changed values to defaults and comparing against the default cube
    default_cube = generate_metadata()
    cube.coord("time").points = default_cube.coord("time").points
    cube.coord("forecast_period").points = default_cube.coord("forecast_period").points

    assert cube == default_cube
Example No. 16
    def test_bounds(self):
        """Test iris_time_to_datetime returns list of datetimes calculated
        from the coordinate bounds."""
        # Assign time bounds equivalent to [
        # datetime(2017, 2, 17, 5, 0),
        # datetime(2017, 2, 17, 6, 0)]
        self.cube.coord("time").bounds = [1487307600, 1487311200]

        result = iris_time_to_datetime(self.cube.coord("time"), point_or_bound="bound")
        self.assertIsInstance(result, list)
        self.assertEqual(len(result), 1)
        self.assertEqual(len(result[0]), 2)
        for item in result[0]:
            self.assertIsInstance(item, datetime)
        self.assertEqual(result[0][0], datetime(2017, 2, 17, 5, 0))
        self.assertEqual(result[0][1], datetime(2017, 2, 17, 6, 0))
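
A quick standalone check of the bound values used above (plain stdlib, no iris):

from datetime import datetime, timezone

for b in (1487307600, 1487311200):
    print(datetime.fromtimestamp(b, tz=timezone.utc))
# 2017-02-17 05:00:00+00:00
# 2017-02-17 06:00:00+00:00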
Example No. 17
    def _filter_non_matching_cubes(historic_forecast, truth):
        """
        Provide filtering for the historic forecast and truth to make sure
        that these contain matching validity times. This ensures that any
        mismatch between the historic forecasts and truth is dealt with.

        Args:
            historic_forecast (iris.cube.Cube):
                Cube of historic forecasts that potentially contains
                a mismatch compared to the truth.
            truth (iris.cube.Cube):
                Cube of truth that potentially contains a mismatch
                compared to the historic forecasts.

        Returns:
            (tuple): tuple containing:
                **matching_historic_forecasts** (iris.cube.Cube):
                    Cube of historic forecasts where any mismatches with
                    the truth cube have been removed.
                **matching_truths** (iris.cube.Cube):
                    Cube of truths where any mismatches with
                    the historic_forecasts cube have been removed.

        Raises:
            ValueError: The filtering has found no matches in validity time
                between the historic forecasts and the truths.

        """
        matching_historic_forecasts = iris.cube.CubeList([])
        matching_truths = iris.cube.CubeList([])
        for hf_slice in historic_forecast.slices_over("time"):
            coord_values = {
                "time": iris_time_to_datetime(hf_slice.coord("time"))
            }
            constr = iris.Constraint(coord_values=coord_values)
            truth_slice = truth.extract(constr)
            if truth_slice:
                matching_historic_forecasts.append(hf_slice)
                matching_truths.append(truth_slice)
        if not matching_historic_forecasts and not matching_truths:
            msg = ("The filtering has found no matches in validity time "
                   "between the historic forecasts and the truths.")
            raise ValueError(msg)
        return (matching_historic_forecasts.merge_cube(),
                matching_truths.merge_cube())
Example No. 18
    def process(self, cube):
        """
        Calculate the daynight mask for the provided cube. Note that only the
        hours and minutes of the dtval variable are used. To ensure consistent
        behaviour with changes of second or subsecond precision, the second
        component is added to the time object. This means that when the hours
        and minutes are used, we have correctly rounded to the nearest minute,
        e.g.::

           dt(2017, 1, 1, 11, 59, 59) -- +59 --> dt(2017, 1, 1, 12, 0, 58)
           dt(2017, 1, 1, 12, 0, 1)   -- +1  --> dt(2017, 1, 1, 12, 0, 2)
           dt(2017, 1, 1, 12, 0, 30)  -- +30 --> dt(2017, 1, 1, 12, 1, 0)

        Args:
            cube (iris.cube.Cube):
                input cube

        Returns:
            daynight_mask (iris.cube.Cube):
                daynight mask cube, daytime set to self.day
                nighttime set to self.night.
                The resulting cube will be the same shape as
                the time, y, and x coordinate, other coordinates
                will be ignored although they might appear as attributes
                on the cube as it is extracted from the first slice.
        """
        daynight_mask = self._create_daynight_mask(cube)
        dtvalues = iris_time_to_datetime(daynight_mask.coord('time'))
        for i, dtval in enumerate(dtvalues):
            mask_cube = daynight_mask[i]
            day_of_year = (dtval - dt.datetime(dtval.year, 1, 1)).days
            dtval = dtval + dt.timedelta(seconds=dtval.second)
            utc_hour = (dtval.hour * 60.0 + dtval.minute) / 60.0
            trg_crs = lat_lon_determine(mask_cube)
            # Grids that are not Lat Lon
            if trg_crs is not None:
                lats, lons = transform_grid_to_lat_lon(mask_cube)
                solar_el = calc_solar_elevation(lats, lons,
                                                day_of_year, utc_hour)
                mask_cube.data[np.where(solar_el > 0.0)] = self.day
            else:
                mask_cube = self._daynight_lat_lon_cube(mask_cube,
                                                        day_of_year, utc_hour)
            daynight_mask.data[i, ::] = mask_cube.data
        return daynight_mask
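
A worked check of the rounding trick described in the docstring above: adding the seconds component again pushes values past the half-minute boundary, so reading only hours and minutes afterwards rounds to the nearest minute:

from datetime import datetime, timedelta

for t in (datetime(2017, 1, 1, 11, 59, 59),
          datetime(2017, 1, 1, 12, 0, 1),
          datetime(2017, 1, 1, 12, 0, 30)):
    rounded = t + timedelta(seconds=t.second)
    print(t.time(), "->", "%02d:%02d" % (rounded.hour, rounded.minute))
# 11:59:59 -> 12:00
# 12:00:01 -> 12:00
# 12:00:30 -> 12:01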
Example No. 19
def test_set_frt():
    """ Tests cube generated with specified forecast reference time and the rest of the
    values set as default values """
    frt = datetime(2017, 1, 1, 0, 0)
    cube = generate_metadata(frt=frt)

    assert iris_time_to_datetime(cube.coord("forecast_reference_time"))[0] == frt
    assert cube.coord("forecast_period").points > 0

    # Assert that no other values have unexpectedly changed, by resetting the
    # changed values to defaults and comparing against the default cube
    default_cube = generate_metadata()

    cube.coord("forecast_reference_time").points = default_cube.coord(
        "forecast_reference_time"
    ).points
    cube.coord("forecast_period").points = default_cube.coord("forecast_period").points

    assert cube == default_cube
Example No. 20
    def _select_orographic_enhancement_cube(precip_cube, oe_cubes):
        """Select the orographic enhancement cube with the required time
        coordinate.

        Args:
            precip_cube (iris.cube.Cube):
                Cube containing the input precipitation fields.
            oe_cubes (iris.cube.Cube or iris.cube.CubeList):
                Cube or CubeList containing the orographic enhancement fields.

        Returns:
            oe_cube (iris.cube.Cube):
                Cube containing the orographic enhancement fields at the
                required time.

        """
        time_point, = iris_time_to_datetime(precip_cube.coord("time").copy())
        oe_cube = extract_nearest_time_point(oe_cubes, time_point)
        return oe_cube
Example No. 21
def test_set_time_period():
    """ Tests cube generated with time bounds calculated using specified time_period
    and the rest of the values set as default values """
    time_period = 150
    cube = generate_metadata(time_period=time_period)

    assert iris_time_to_datetime(cube.coord("time"))[0] == TIME_DEFAULT
    assert cube.coord("forecast_period").points == FORECAST_PERIOD_DEFAULT
    assert cube.coord("time").bounds[0][0] == datetime_to_iris_time(
        datetime(2017, 11, 10, 1, 30)
    )
    assert cube.coord("time").bounds[0][1] == datetime_to_iris_time(TIME_DEFAULT)

    # Assert that no other values have unexpectedly changed, by resetting the
    # changed values to defaults and comparing against the default cube
    default_cube = generate_metadata()
    cube.coord("time").bounds = None
    cube.coord("forecast_period").bounds = None

    assert cube == default_cube
Example No. 22
    def process(self, cube):
        """
        Calculate the daynight mask for the provided cube

        Args:
            cube (iris.cube.Cube):
                input cube

        Returns:
            daynight_mask (iris.cube.Cube):
                daynight mask cube, daytime set to self.day
                nighttime set to self.night.
                The resulting cube will be the same shape as
                the time, y, and x coordinate, other coordinates
                will be ignored although they might appear as attributes
                on the cube as it is extracted from the first slice.
        """
        daynight_mask = self._create_daynight_mask(cube)
        dtvalues = iris_time_to_datetime(daynight_mask.coord('time'))
        for i, dtval in enumerate(dtvalues):
            mask_cube = daynight_mask[i]
            day_of_year = (dtval - dt.datetime(dtval.year, 1, 1)).days
            utc_hour = (dtval.hour * 60.0 + dtval.minute) / 60.0
            trg_crs = lat_lon_determine(mask_cube)
            # Grids that are not Lat Lon
            if trg_crs is not None:
                lats, lons = transform_grid_to_lat_lon(mask_cube)
                solar_el = calc_solar_elevation(lats, lons, day_of_year,
                                                utc_hour)
                mask_cube.data[np.where(solar_el > 0.0)] = self.day
            else:
                mask_cube = self._daynight_lat_lon_cube(
                    mask_cube, day_of_year, utc_hour)
            daynight_mask.data[i, ::] = mask_cube.data

        return daynight_mask
Example No. 23
    def process(self, cube_t0, cube_t1):
        """
        Interpolate data to intermediate times between validity times of
        cube_t0 and cube_t1.

        Args:
            cube_t0 (iris.cube.Cube):
                A diagnostic cube valid at the beginning of the period within
                which interpolation is to be permitted.
            cube_t1 (iris.cube.Cube):
                A diagnostic cube valid at the end of the period within which
                interpolation is to be permitted.

        Returns:
            interpolated_cubes (iris.cube.CubeList):
                A list of cubes interpolated to the desired times.

        Raises:
            TypeError: If cube_t0 and cube_t1 are not of type iris.cube.Cube.
            CoordinateNotFoundError: The input cubes contain no time
                                     coordinate.
            ValueError: Cubes contain multiple validity times.
            ValueError: The input cubes are ordered such that the initial time
                        cube has a later validity time than the final cube.
        """
        if (not isinstance(cube_t0, iris.cube.Cube)
                or not isinstance(cube_t1, iris.cube.Cube)):
            msg = ('Inputs to TemporalInterpolation are not of type '
                   'iris.cube.Cube, first input is type '
                   '{}, second input is type {}'.format(
                       type(cube_t0), type(cube_t1)))
            raise TypeError(msg)

        try:
            initial_time, = iris_time_to_datetime(cube_t0.coord('time'))
            final_time, = iris_time_to_datetime(cube_t1.coord('time'))
        except CoordinateNotFoundError:
            msg = ('Cube provided to TemporalInterpolation contains no time '
                   'coordinate.')
            raise CoordinateNotFoundError(msg)
        except ValueError:
            msg = ('Cube provided to TemporalInterpolation contains multiple '
                   'validity times, only one expected.')
            raise ValueError(msg)

        if initial_time > final_time:
            raise ValueError('TemporalInterpolation input cubes ordered '
                             'incorrectly, with the final time being before '
                             'the initial time.')

        time_list = self.construct_time_list(initial_time, final_time)
        cubes = iris.cube.CubeList([cube_t0, cube_t1])
        cube = merge_cubes(cubes)

        interpolated_cube = cube.interpolate(time_list, iris.analysis.Linear())
        self.enforce_time_coords_dtype(interpolated_cube)
        interpolated_cubes = iris.cube.CubeList()
        if self.interpolation_method == 'solar':
            interpolated_cubes = self.solar_interpolate(
                cube, interpolated_cube)
        elif self.interpolation_method == 'daynight':
            interpolated_cubes = (self.daynight_interpolate(interpolated_cube))
        else:
            for single_time in interpolated_cube.slices_over('time'):
                interpolated_cubes.append(single_time)

        return interpolated_cubes
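
The time_list handed to Cube.interpolate above is assumed to take iris's standard sample-point form, [("time", [datetime, ...])]; a sketch of building one at a 30-minute interval between the two validity times:

from datetime import datetime, timedelta

initial_time = datetime(2017, 11, 1, 3, 0)
final_time = datetime(2017, 11, 1, 6, 0)
interval = timedelta(minutes=30)

times = []
current = initial_time + interval
while current < final_time:
    times.append(current)
    current += interval
time_list = [("time", times)]
print(time_list)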
Example No. 24
def process(
    start_cube: cli.inputcube,
    end_cube: cli.inputcube,
    *,
    interval_in_mins: int = None,
    times: cli.comma_separated_list = None,
    interpolation_method="linear",
):
    """Interpolate data between validity times.

    Interpolate data to intermediate times between the validity times of two
    cubes. This can be used to fill in missing data (e.g. for radar fields)
    or to ensure data is available at the required intervals when model data
    is not available at these times.

    Args:
        start_cube (iris.cube.Cube):
            Cube containing the data at the beginning.
        end_cube (iris.cube.Cube):
            Cube containing the data at the end.
        interval_in_mins (int):
            Specifies the interval in minutes at which to interpolate between
            the two input cubes.
            A number of minutes which does not divide up the interval equally
            will raise an exception.
            If interval_in_mins is set then times cannot be used.
        times (str):
            Specifies the times in the format {YYYYMMDD}T{HHMM}Z
            at which to interpolate between the two input cubes.
            Where {YYYYMMDD} is year, month, day and {HHMM} is hour and
            minutes, e.g. 20180116T0100Z. More than one time can be provided,
            separated by a comma.
            If times are set, interval_in_mins cannot be used.
        interpolation_method (str):
            ["linear", "solar", "daynight"]
            Specifies the interpolation method:
            solar interpolates using the solar elevation;
            daynight uses linear interpolation but sets night-time points to
            0.0; linear is linear interpolation.

    Returns:
        iris.cube.CubeList:
            A list of cubes interpolated to the desired times. The
            interpolated cubes will always be in chronological order of
            earliest to latest regardless of the order of the input.
    """
    from improver.utilities.cube_manipulation import MergeCubes
    from improver.utilities.temporal import cycletime_to_datetime, iris_time_to_datetime
    from improver.utilities.temporal_interpolation import TemporalInterpolation

    (time_start,) = iris_time_to_datetime(start_cube.coord("time"))
    (time_end,) = iris_time_to_datetime(end_cube.coord("time"))
    if time_end < time_start:
        # swap cubes
        start_cube, end_cube = end_cube, start_cube

    if times is not None:
        times = [cycletime_to_datetime(timestr) for timestr in times]

    result = TemporalInterpolation(
        interval_in_minutes=interval_in_mins,
        times=times,
        interpolation_method=interpolation_method,
    )(start_cube, end_cube)
    return MergeCubes()(result)
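
A sketch of the {YYYYMMDD}T{HHMM}Z parsing that cycletime_to_datetime is assumed to perform (format string inferred from the docstring above):

from datetime import datetime

print(datetime.strptime("20180116T0100Z", "%Y%m%dT%H%MZ"))
# 2018-01-16 01:00:00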
Example No. 25
 def test_basic(self):
     """Test iris_time_to_datetime returns list of datetime """
     result = iris_time_to_datetime(self.cube.coord('time'))
     self.assertIsInstance(result, list)
     self.assertEqual(result[0], datetime.datetime(2017, 2, 17, 6, 0))
Example No. 26
    def setUp(self):
        """
        Set up cubes for use in testing SpotLapseRateAdjust. Inputs are
        envisaged as follows:

        Gridded

         Lapse rate  Orography  Temperatures (not used directly)
          (x DALR)

            A B C      A B C        A   B   C

        a   2 1 1      1 1 1       270 270 270
        b   1 2 1      1 4 1       270 280 270
        c   1 1 2      1 1 1       270 270 270

        Spot
        (note the neighbours are identified with the A-C, a-c indices above)

         Site  Temperature Altitude  Nearest    DZ   MinDZ      DZ
                                     neighbour       neighbour

          0        280        3      Ac         2    Bb         -1
          1        270        4      Bb         0    Bb          0
          2        280        0      Ca        -1    Ca         -1


        """
        # Set up lapse rate cube
        lapse_rate_data = np.ones(9).reshape(3, 3).astype(np.float32) * DALR
        lapse_rate_data[0, 2] = 2 * DALR
        lapse_rate_data[1, 1] = 2 * DALR
        lapse_rate_data[2, 0] = 2 * DALR
        self.lapse_rate_cube = set_up_variable_cube(lapse_rate_data,
                                                    name="lapse_rate",
                                                    units="K m-1",
                                                    spatial_grid="equalarea")
        diagnostic_cube_hash = create_coordinate_hash(self.lapse_rate_cube)

        # Set up neighbour and spot diagnostic cubes
        y_coord, x_coord = construct_xy_coords(3, 3, "equalarea")
        y_coord = y_coord.points
        x_coord = x_coord.points

        # neighbours, each group is for a point under two methods, e.g.
        # [ 0.  0.  0.] is the nearest point to the first spot site, whilst
        # [ 1.  1. -1.] is the nearest point with minimum height difference.
        neighbours = np.array([[[0., 0., 2.], [1., 1., -1.]],
                               [[1., 1., 0.], [1., 1., 0.]],
                               [[2., 2., -1.], [2., 2., -1.]]])
        altitudes = np.array([3, 4, 0])
        latitudes = np.array([y_coord[0], y_coord[1], y_coord[2]])
        longitudes = np.array([x_coord[0], x_coord[1], x_coord[2]])
        wmo_ids = np.arange(3)
        grid_attributes = ['x_index', 'y_index', 'vertical_displacement']
        neighbour_methods = ['nearest', 'nearest_minimum_dz']
        self.neighbour_cube = build_spotdata_cube(
            neighbours,
            'grid_neighbours',
            1,
            altitudes,
            latitudes,
            longitudes,
            wmo_ids,
            grid_attributes=grid_attributes,
            neighbour_methods=neighbour_methods)
        self.neighbour_cube.attributes['model_grid_hash'] = (
            diagnostic_cube_hash)

        time, = iris_time_to_datetime(self.lapse_rate_cube.coord("time"))
        frt, = iris_time_to_datetime(
            self.lapse_rate_cube.coord("forecast_reference_time"))
        time_bounds = None

        time_coords = construct_scalar_time_coords(time, time_bounds, frt)
        time_coords = [item[0] for item in time_coords]

        # This temperature cube is set up with the spot sites having obtained
        # their temperature values from the nearest grid sites.
        temperatures_nearest = np.array([280, 270, 280])
        self.spot_temperature_nearest = build_spotdata_cube(
            temperatures_nearest,
            'air_temperature',
            'K',
            altitudes,
            latitudes,
            longitudes,
            wmo_ids,
            scalar_coords=time_coords)
        self.spot_temperature_nearest.attributes['model_grid_hash'] = (
            diagnostic_cube_hash)

        # This temperature cube is set up with the spot sites having obtained
        # their temperature values from the nearest minimum vertical
        # displacement grid sites. The only difference here is for site 0,
        # which now gets its temperature from Bb (see doc-string above).
        temperatures_mindz = np.array([270, 270, 280])
        self.spot_temperature_mindz = build_spotdata_cube(
            temperatures_mindz,
            'air_temperature',
            'K',
            altitudes,
            latitudes,
            longitudes,
            wmo_ids,
            scalar_coords=time_coords)
        self.spot_temperature_mindz.attributes['model_grid_hash'] = (
            diagnostic_cube_hash)
Example No. 27
    def _apply_params(self, forecast_predictors, forecast_vars,
                      optimised_coeffs, coeff_names, predictor_of_mean_flag):
        """
        Function to apply EMOS coefficients to all required dates.

        Args:
            forecast_predictors (iris.cube.Cube):
                Cube containing the forecast predictor, e.g. ensemble mean
                or ensemble realizations.
            forecast_vars (iris.cube.Cube):
                Cube containing the forecast variance, e.g. ensemble variance.
            optimised_coeffs (dict):
                Dictionary of optimised coefficients for each date.
            coeff_names (list):
                Coefficient names.
            predictor_of_mean_flag (str):
                String to specify the input to calculate the calibrated mean.
                Currently the ensemble mean ("mean") and the ensemble
                realizations ("realizations") are supported as the predictors.

        Returns:
            (tuple) : tuple containing:
                **calibrated_forecast_predictor_all_dates** (CubeList):
                    List of cubes containing the calibrated forecast predictor.
                **calibrated_forecast_var_all_dates** (CubeList):
                    List of cubes containing the calibrated forecast variance.
                **calibrated_forecast_coefficients_all_dates** (CubeList):
                    List of cubes containing the coefficients used for
                    calibration.

        """
        calibrated_forecast_predictor_all_dates = iris.cube.CubeList()
        calibrated_forecast_var_all_dates = iris.cube.CubeList()
        calibrated_forecast_coefficients_all_dates = iris.cube.CubeList()

        for forecast_predictor, forecast_var in zip(
                forecast_predictors.slices_over("time"),
                forecast_vars.slices_over("time")):

            date = iris_time_to_datetime(
                forecast_predictor.coord("time").copy())[0]
            constr = iris.Constraint(time=date)
            forecast_predictor_at_date = forecast_predictor.extract(constr)
            forecast_var_at_date = forecast_var.extract(constr)

            # If the coefficients are not available for the date, use the
            # raw ensemble forecast as the calibrated ensemble forecast.
            if date not in optimised_coeffs.keys():
                msg = ("Ensemble calibration not available "
                       "for forecasts with start time of {}. "
                       "Coefficients not available".format(
                           date.strftime("%Y%m%d%H%M")))
                warnings.warn(msg)
                calibrated_forecast_predictor_at_date = (
                    forecast_predictor_at_date.copy())
                calibrated_forecast_var_at_date = forecast_var_at_date.copy()
                optimised_coeffs[date] = np.full(len(coeff_names), np.nan)
                coeff_cubes = self._create_coefficient_cube(
                    forecast_predictor_at_date, optimised_coeffs, coeff_names)
            else:
                optimised_coeffs_at_date = (optimised_coeffs[date])

                # Assigning coefficients to coefficient names.
                if len(optimised_coeffs_at_date) == len(coeff_names):
                    optimised_coeffs_at_date = dict(
                        zip(coeff_names, optimised_coeffs_at_date))
                elif len(optimised_coeffs_at_date) > len(coeff_names):
                    excess_beta = (
                        optimised_coeffs_at_date[len(coeff_names):].tolist())
                    optimised_coeffs_at_date = (dict(
                        list(zip(coeff_names, optimised_coeffs_at_date))))
                    optimised_coeffs_at_date["beta"] = np.array(
                        [optimised_coeffs_at_date["beta"]] + excess_beta)
                else:
                    msg = ("Number of coefficient names {} with names {} "
                           "is not equal to the number of "
                           "optimised_coeffs_at_date values {} "
                           "with values {} or the number of "
                           "coefficients is not greater than the "
                           "number of coefficient names. Can not continue "
                           "if the number of coefficient names out number "
                           "the number of coefficients".format(
                               len(coeff_names), coeff_names,
                               len(optimised_coeffs_at_date),
                               optimised_coeffs_at_date))
                    raise ValueError(msg)

                if predictor_of_mean_flag.lower() in ["mean"]:
                    # Calculate predicted mean = a + b*X, where X is the
                    # raw ensemble mean. In this case, b = beta.
                    beta = [
                        optimised_coeffs_at_date["a"],
                        optimised_coeffs_at_date["beta"]
                    ]
                    forecast_predictor_flat = (
                        forecast_predictor_at_date.data.flatten())
                    new_col = np.ones(forecast_predictor_flat.shape)
                    all_data = np.column_stack(
                        (new_col, forecast_predictor_flat))
                    predicted_mean = np.dot(all_data, beta)
                    calibrated_forecast_predictor_at_date = (
                        forecast_predictor_at_date)
                elif predictor_of_mean_flag.lower() in ["realizations"]:
                    # Calculate predicted mean = a + b*X, where X is the
                    # raw ensemble realizations. In this case, b = beta^2.
                    beta = np.concatenate([[optimised_coeffs_at_date["a"]],
                                           optimised_coeffs_at_date["beta"]**2
                                           ])
                    forecast_predictor = (enforce_coordinate_ordering(
                        forecast_predictor, "realization"))
                    forecast_predictor_flat = (
                        convert_cube_data_to_2d(forecast_predictor_at_date))
                    forecast_var_flat = forecast_var_at_date.data.flatten()

                    new_col = np.ones(forecast_var_flat.shape)
                    all_data = (np.column_stack(
                        (new_col, forecast_predictor_flat)))
                    predicted_mean = np.dot(all_data, beta)
                    # Calculate mean of ensemble realizations, as only the
                    # calibrated ensemble mean will be returned.
                    calibrated_forecast_predictor_at_date = (
                        forecast_predictor_at_date.collapsed(
                            "realization", iris.analysis.MEAN))

                xlen = len(forecast_predictor_at_date.coord(axis="x").points)
                ylen = len(forecast_predictor_at_date.coord(axis="y").points)
                predicted_mean = np.reshape(predicted_mean, (ylen, xlen))
                calibrated_forecast_predictor_at_date.data = predicted_mean

                # Calculating the predicted variance, based on the
                # raw variance S^2, where predicted variance = c + dS^2,
                # where c = (gamma)^2 and d = (delta)^2
                predicted_var = (optimised_coeffs_at_date["gamma"]**2 +
                                 optimised_coeffs_at_date["delta"]**2 *
                                 forecast_var_at_date.data)

                calibrated_forecast_var_at_date = forecast_var_at_date
                calibrated_forecast_var_at_date.data = predicted_var

                coeff_cubes = self._create_coefficient_cube(
                    calibrated_forecast_predictor_at_date,
                    optimised_coeffs[date], coeff_names)

            calibrated_forecast_predictor_all_dates.append(
                calibrated_forecast_predictor_at_date)
            calibrated_forecast_var_all_dates.append(
                calibrated_forecast_var_at_date)
            calibrated_forecast_coefficients_all_dates.extend(coeff_cubes)

        return (calibrated_forecast_predictor_all_dates,
                calibrated_forecast_var_all_dates,
                calibrated_forecast_coefficients_all_dates)
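
A worked numpy illustration of the "predicted mean = a + b*X" step above, with made-up coefficients (a=1.0, beta=0.8) and a flattened predictor field:

import numpy as np

a, beta = 1.0, 0.8  # hypothetical coefficients
forecast_predictor_flat = np.array([270.0, 275.0, 280.0])
new_col = np.ones(forecast_predictor_flat.shape)
all_data = np.column_stack((new_col, forecast_predictor_flat))
predicted_mean = np.dot(all_data, [a, beta])
print(predicted_mean)  # [217. 221. 225.]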
Example No. 28
    def estimate_coefficients_for_ngr(self, current_forecast,
                                      historic_forecast, truth):
        """
        Using Nonhomogeneous Gaussian Regression/Ensemble Model Output
        Statistics, estimate the required coefficients from historical
        forecasts.

        The main steps within this method are:

        1. Metadata checks to ensure that the current forecast, historic
           forecast and truth exist in a form that can be processed.
        2. Loop through times within the concatenated current forecast cube:

           1. Extract the desired forecast period from the historic forecasts
              to match the current forecasts. Apply unit conversion to ensure
              that historic forecasts have the desired units for calibration.
           2. Extract the relevant truth to coincide with the time within
              the historic forecasts. Apply unit conversion to ensure
              that the truth has the desired units for calibration.
           3. Calculate mean and variance.
           4. Calculate initial guess at coefficient values by performing a
              linear regression, if requested, otherwise default values are
              used.
           5. Perform minimisation.

        Args:
            current_forecast (iris.cube.Cube or iris.cube.CubeList):
                The cube containing the current forecast.
            historic_forecast (iris.cube.Cube or iris.cube.CubeList):
                The cube or cubelist containing the historic forecasts used
                for calibration.
            truth (iris.cube.Cube or iris.cube.CubeList):
                The cube or cubelist containing the truth used for
                calibration.

        Returns:
            (tuple): tuple containing:
                **optimised_coeffs** (dict):
                    Dictionary containing a list of the optimised coefficients
                    for each date.
                **coeff_names** (list):
                    The name of each coefficient.

        """
        def convert_to_cubelist(cubes, cube_type="forecast"):
            """
            Convert cube to cubelist, if necessary.

            Args:
                cubes (Iris Cube or Iris CubeList):
                    Cube to be converted to CubeList.
                cube_type (str):
                    String describing the cube which is being converted to a
                    CubeList.

            Raises:
                TypeError: If the input data is not an Iris cube.

            """
            if not isinstance(cubes, iris.cube.CubeList):
                cubes = iris.cube.CubeList([cubes])
            for cube in cubes:
                if not isinstance(cube, iris.cube.Cube):
                    msg = ("The input data within the {} "
                           "is not an Iris Cube.".format(cube_type))
                    raise TypeError(msg)
            return cubes

        # Ensure predictor_of_mean_flag is valid.
        check_predictor_of_mean_flag(self.predictor_of_mean_flag)

        # Setting default values for optimised_coeffs and coeff_names.
        optimised_coeffs = {}
        coeff_names = ["gamma", "delta", "a", "beta"]

        # Set default values for whether there are NaN values within the
        # initial guess.
        nan_in_initial_guess = False

        for var in [current_forecast, historic_forecast, truth]:
            if (isinstance(var, iris.cube.Cube)
                    or isinstance(var, iris.cube.CubeList)):
                current_forecast_cubes = current_forecast
                historic_forecast_cubes = historic_forecast
                truth_cubes = truth
            else:
                msg = ("{} is not a Cube or CubeList."
                       "Returning default values for optimised_coeffs {} "
                       "and coeff_names {}.").format(var, optimised_coeffs,
                                                     coeff_names)
                warnings.warn(msg)
                return optimised_coeffs, coeff_names

        current_forecast_cubes = (convert_to_cubelist(
            current_forecast_cubes, cube_type="current forecast"))
        historic_forecast_cubes = (convert_to_cubelist(
            historic_forecast_cubes, cube_type="historic forecast"))
        truth_cubes = convert_to_cubelist(truth_cubes, cube_type="truth")

        if (len(current_forecast_cubes) == 0
                or len(historic_forecast_cubes) == 0 or len(truth_cubes) == 0):
            msg = ("Insufficient input data present to estimate "
                   "coefficients using NGR. "
                   "\nNumber of current_forecast_cubes: {}"
                   "\nNumber of historic_forecast_cubes: {}"
                   "\nNumber of truth_cubes: {}".format(
                       len(current_forecast_cubes),
                       len(historic_forecast_cubes), len(truth_cubes)))
            warnings.warn(msg)
            return optimised_coeffs, coeff_names

        current_forecast_cubes = concatenate_cubes(current_forecast_cubes)
        historic_forecast_cubes = concatenate_cubes(historic_forecast_cubes)
        truth_cubes = concatenate_cubes(truth_cubes)

        for current_forecast_cube in current_forecast_cubes.slices_over(
                "time"):
            date = unit.num2date(
                current_forecast_cube.coord("time").points,
                current_forecast_cube.coord("time").units.name,
                current_forecast_cube.coord("time").units.calendar)[0]
            # Extract desired forecast_period from historic_forecast_cubes.
            forecast_period_constr = iris.Constraint(
                forecast_period=current_forecast_cube.coord(
                    "forecast_period").points)
            historic_forecast_cube = historic_forecast_cubes.extract(
                forecast_period_constr)

            # Extract truth matching the time of the historic forecast.
            reference_time = iris_time_to_datetime(
                historic_forecast_cube.coord("time").copy())
            truth_constr = iris.Constraint(
                forecast_reference_time=reference_time)
            truth_cube = truth_cubes.extract(truth_constr)

            if truth_cube is None:
                msg = ("Unable to calibrate for the time points {} "
                       "as no truth data is available."
                       "Moving on to try to calibrate "
                       "next time point.".format(
                           historic_forecast_cube.coord("time").points))
                warnings.warn(msg)
                continue

            # Make sure inputs have the same units.
            historic_forecast_cube.convert_units(self.desired_units)
            truth_cube.convert_units(self.desired_units)

            if self.predictor_of_mean_flag.lower() in ["mean"]:
                no_of_realizations = None
                forecast_predictor = historic_forecast_cube.collapsed(
                    "realization", iris.analysis.MEAN)
            elif self.predictor_of_mean_flag.lower() in ["realizations"]:
                no_of_realizations = len(
                    historic_forecast_cube.coord("realization").points)
                forecast_predictor = historic_forecast_cube

            forecast_var = historic_forecast_cube.collapsed(
                "realization", iris.analysis.VARIANCE)

            # Computing initial guess for EMOS coefficients
            # If no initial guess from a previous iteration, or if there
            # are NaNs in the initial guess, calculate an initial guess.
            if "initial_guess" not in locals() or nan_in_initial_guess:
                initial_guess = self.compute_initial_guess(
                    truth_cube,
                    forecast_predictor,
                    self.predictor_of_mean_flag,
                    self.ESTIMATE_COEFFICIENTS_FROM_LINEAR_MODEL_FLAG,
                    no_of_realizations=no_of_realizations)

            if np.any(np.isnan(initial_guess)):
                nan_in_initial_guess = True

            if not nan_in_initial_guess:
                # Need to access the x attribute returned by the
                # minimisation function.
                optimised_coeffs[date] = (
                    self.minimiser.crps_minimiser_wrapper(
                        initial_guess, forecast_predictor, truth_cube,
                        forecast_var, self.predictor_of_mean_flag,
                        self.distribution.lower()))
                initial_guess = optimised_coeffs[date]
            else:
                optimised_coeffs[date] = initial_guess

        return optimised_coeffs, coeff_names
Example No. 29
    def apply_precip(self, prob_lightning_cube: Cube,
                     prob_precip_cube: Cube) -> Cube:
        """
        Modify Nowcast of lightning probability with precipitation rate
        probabilities at thresholds of 0.5, 7 and 35 mm/h.

        Args:
            prob_lightning_cube:
                First-guess lightning probability.

            prob_precip_cube:
                Nowcast precipitation probability
                (threshold > 0.5, 7., 35. mm hr-1)
                Units of threshold coord modified in-place to mm hr-1

        Returns:
            Output cube containing updated nowcast lightning probability.
            This cube will have the same dimensions and meta-data as
            prob_lightning_cube.

        Raises:
            iris.exceptions.ConstraintMismatchError:
                If prob_precip_cube does not contain the expected thresholds.
        """
        new_cube_list = iris.cube.CubeList([])
        # check prob-precip threshold units are as expected
        precip_threshold_coord = find_threshold_coordinate(prob_precip_cube)
        precip_threshold_coord.convert_units("mm hr-1")
        # extract precipitation probabilities at required thresholds
        for cube_slice in prob_lightning_cube.slices_over("time"):
            this_time = iris_time_to_datetime(
                cube_slice.coord("time").copy())[0]
            this_precip = prob_precip_cube.extract(
                iris.Constraint(time=this_time)
                & iris.Constraint(coord_values={
                    precip_threshold_coord:
                    lambda t: isclose(t.point, 0.5)
                }))
            high_precip = prob_precip_cube.extract(
                iris.Constraint(time=this_time)
                & iris.Constraint(coord_values={
                    precip_threshold_coord:
                    lambda t: isclose(t.point, 7.0)
                }))
            torr_precip = prob_precip_cube.extract(
                iris.Constraint(time=this_time)
                & iris.Constraint(coord_values={
                    precip_threshold_coord:
                    lambda t: isclose(t.point, 35.0)
                }))
            err_string = "No matching {} cube for {}"
            if not isinstance(this_precip, iris.cube.Cube):
                raise ConstraintMismatchError(
                    err_string.format("any precip", this_time))
            if not isinstance(high_precip, iris.cube.Cube):
                raise ConstraintMismatchError(
                    err_string.format("high precip", this_time))
            if not isinstance(torr_precip, iris.cube.Cube):
                raise ConstraintMismatchError(
                    err_string.format("intense precip", this_time))
            # Increase prob(lightning) to Risk 2 (pl_dict[2]) when
            #   prob(precip > 7mm/hr) > phighthresh
            cube_slice.data = np.where(
                (high_precip.data >= self.phighthresh)
                & (cube_slice.data < self.pl_dict[2]),
                self.pl_dict[2],
                cube_slice.data,
            )
            # Increase prob(lightning) to Risk 1 (pl_dict[1]) when
            #   prob(precip > 35mm/hr) > ptorrthresh
            cube_slice.data = np.where(
                (torr_precip.data >= self.ptorrthresh)
                & (cube_slice.data < self.pl_dict[1]),
                self.pl_dict[1],
                cube_slice.data,
            )

            # Decrease prob(lightning) where prob(precip > 0.5 mm hr-1) is low.
            cube_slice.data = apply_double_scaling(this_precip, cube_slice,
                                                   self.precipthr,
                                                   self.ltngthr)

            new_cube_list.append(cube_slice)

        new_cube = new_cube_list.merge_cube()
        new_cube = check_cube_coordinates(prob_lightning_cube, new_cube)
        return new_cube
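
A short aside on why the threshold constraints above test with isclose rather than "==": floating-point threshold points rarely compare exactly equal to literals after arithmetic or unit conversion:

from math import isclose

print(0.1 + 0.2 == 0.3)         # False
print(isclose(0.1 + 0.2, 0.3))  # True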
Example No. 30
    def solar_interpolate(self, diag_cube, interpolated_cube):
        """
        Temporal Interpolation code using solar elevation for
        parameters (e.g. solar radiation parameters like
        Downward Shortwave (SW) radiation or UV index)
        which are zero if the sun is below the horizon and
        scaled by the sine of the solar elevation angle if the sun is above the
        horizon.

        Args:
            diag_cube (iris.cube.Cube):
                cube containing diagnostic data valid at the beginning
                of the period and at the end of the period.
            interpolated_cube (iris.cube.Cube):
                cube containing Linear interpolation of
                diag_cube at interpolation times in time_list.
        Returns:
            interpolated_cubes (iris.cube.CubeList):
                A list of cubes interpolated to the desired times.

        """

        interpolated_cubes = iris.cube.CubeList()
        (lats, lons) = self.calc_lats_lons(diag_cube)
        prev_data = diag_cube[0].data
        next_data = diag_cube[1].data
        dtvals = iris_time_to_datetime(diag_cube.coord('time'))
        # Calculate sine of solar elevation for cube valid at the
        # beginning of the period.
        dtval_prev = dtvals[0]
        sin_phi_prev = self.calc_sin_phi(dtval_prev, lats, lons)
        # Calculate sine of solar elevation for cube valid at the
        # end of the period.
        dtval_next = dtvals[1]
        sin_phi_next = self.calc_sin_phi(dtval_next, lats, lons)
        # Length of time between beginning and end in seconds
        diff_step = (dtval_next - dtval_prev).seconds

        for single_time in interpolated_cube.slices_over('time'):
            # Calculate sine of solar elevation for cube at this
            # interpolated time.
            dtval_interp = iris_time_to_datetime(single_time.coord('time'))[0]
            sin_phi_interp = self.calc_sin_phi(dtval_interp, lats, lons)
            # Length of time between beginning and interpolated time in seconds
            diff_interp = (dtval_interp - dtval_prev).seconds
            # Set all values to 0.0, to be replaced
            # with values calculated through this solar method.
            single_time.data[:] = 0.0
            sun_up = np.where(sin_phi_interp > 0.0)
            # The solar value is calculated only for points where the sun is
            # up, and is a weighted combination of the data using the sine of
            # the solar elevation and the data in the diag_cube valid
            # at the beginning and end of the period.

            # If the diag_cube containing data valid at the
            # beginning and end of the period has more than
            # x and y coordinates, the calculation needs to be
            # adapted to accommodate this.
            if len(single_time.shape) > 2:
                prevv = (prev_data[..., sun_up[0], sun_up[1]] /
                         sin_phi_prev[sun_up])
                nextv = (next_data[..., sun_up[0], sun_up[1]] /
                         sin_phi_next[sun_up])
                single_time.data[..., sun_up[0],
                                 sun_up[1]] = (sin_phi_interp[sun_up] *
                                               (prevv + (nextv - prevv) *
                                                (diff_interp / diff_step)))
            else:
                prevv = prev_data[sun_up] / sin_phi_prev[sun_up]
                nextv = next_data[sun_up] / sin_phi_next[sun_up]
                single_time.data[sun_up] = (sin_phi_interp[sun_up] *
                                            (prevv + (nextv - prevv) *
                                             (diff_interp / diff_step)))
            # cube with new data added to interpolated_cubes cube List.
            interpolated_cubes.append(single_time)
        return interpolated_cubes
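
A numpy-only sketch of the sun-up weighting applied above, for the 2D branch (assumed inputs: float fields of equal shape; sin_phi_* are sines of solar elevation):

import numpy as np

def solar_weighted(prev_data, next_data, sin_phi_prev, sin_phi_next,
                   sin_phi_interp, diff_interp, diff_step):
    # Night-time points stay zero; sun-up points are a weighted blend of
    # the elevation-normalised start and end fields, rescaled by the
    # interpolated sine of solar elevation.
    out = np.zeros_like(prev_data)
    sun_up = sin_phi_interp > 0.0
    prevv = prev_data[sun_up] / sin_phi_prev[sun_up]
    nextv = next_data[sun_up] / sin_phi_next[sun_up]
    out[sun_up] = sin_phi_interp[sun_up] * (
        prevv + (nextv - prevv) * (diff_interp / diff_step))
    return out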