Example #1
    def _rationalise_blend_time_coords(self, cubelist, cycletime=None):
        """
        Updates time coordinates on unmerged input cubes before blending
        depending on the coordinate over which the blend will be performed.
        Modifies cubes in place.

        If self.blend_coord is forecast_reference_time, ensures the cube does
        not have a forecast_period coordinate (this is recreated after
        blending). If self.weighting_coord is forecast_period, equalises
        forecast_reference_time on each cube before blending.

        Args:
            cubelist (iris.cube.CubeList):
                List of cubes containing data to be blended
            cycletime (str or None):
                The cycletime in a YYYYMMDDTHHMMZ format e.g. 20171122T0100Z

        Raises:
            ValueError: if forecast_reference_time (to be unified) is a
                dimension coordinate
        """
        if "forecast_reference_time" in self.blend_coord:
            for cube in cubelist:
                coord_names = [x.name() for x in cube.coords()]
                if "forecast_period" in coord_names:
                    cube.remove_coord("forecast_period")

        # if blending models using weights by forecast period, set forecast
        # reference times to current cycle time
        if ("model" in self.blend_coord and self.weighting_coord is not None
                and "forecast_period" in self.weighting_coord):
            cycletime = (find_latest_cycletime(cubelist) if cycletime is None
                         else cycletime_to_datetime(cycletime))
            unify_forecast_reference_time(cubelist, cycletime)
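For clarity, here is a standalone sketch of the forecast_period removal step above, exercised on a synthetic one-point cube; the cube and its coordinate values are invented for illustration:

import numpy as np
from iris.coords import AuxCoord
from iris.cube import Cube

# Synthetic cube with a scalar forecast_period coordinate attached:
cube = Cube(np.zeros(3, dtype=np.float32), long_name="air_temperature")
cube.add_aux_coord(
    AuxCoord(10800, long_name="forecast_period", units="seconds"))

# Mirror of the removal logic in _rationalise_blend_time_coords:
coord_names = [x.name() for x in cube.coords()]
if "forecast_period" in coord_names:
    cube.remove_coord("forecast_period")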
Example #2
    def test_define_cycletime_format(self):
        """Test when a cycletime is defined."""
        cycletime = "201711220100"
        dt = datetime.datetime(2017, 11, 22, 1, 0)
        result = cycletime_to_datetime(cycletime,
                                       cycletime_format="%Y%m%d%H%M")
        self.assertEqual(result, dt)
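Both this test and the default-format test below are consistent with a thin strptime wrapper; the sketch that follows is a stand-in inferred from the tests, not the library's actual implementation:

from datetime import datetime

def cycletime_to_datetime(cycletime, cycletime_format="%Y%m%dT%H%MZ"):
    # Parse a cycletime string such as "20171122T0100Z" into a datetime.
    return datetime.strptime(cycletime, cycletime_format)

assert cycletime_to_datetime(
    "201711220100", cycletime_format="%Y%m%d%H%M"
) == datetime(2017, 11, 22, 1, 0)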
Example #3
def rebadge_forecasts_as_latest_cycle(
    cubes: Union[CubeList, List[Cube]], cycletime: Optional[str] = None
) -> CubeList:
    """
    Function to update the forecast_reference_time and forecast_period
    on a list of input forecasts to match either a given cycletime, or
    the most recent forecast in the list (proxy for the current cycle).

    Args:
        cubes:
            Cubes that will have their forecast_reference_time and
            forecast_period updated.
        cycletime:
            Required forecast reference time in a YYYYMMDDTHHMMZ format
            e.g. 20171122T0100Z. If None, the latest forecast reference
            time is used.

    Returns:
        Updated cubes
    """
    if cycletime is None and len(cubes) == 1:
        return cubes
    cycle_datetime = (
        _find_latest_cycletime(cubes)
        if cycletime is None
        else cycletime_to_datetime(cycletime)
    )
    return unify_cycletime(cubes, cycle_datetime)
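When cycletime is None, the docstring says the latest forecast reference time is used as a proxy for the current cycle; here is a plain-datetime sketch of that selection (presumably what _find_latest_cycletime does internally, with invented times standing in for the cubes' coordinates):

from datetime import datetime

# Stand-ins for each input cube's forecast_reference_time point:
frts = [datetime(2017, 11, 22, 0, 0), datetime(2017, 11, 22, 1, 0)]
latest_cycle = max(frts)  # datetime(2017, 11, 22, 1, 0)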
Example #4
    def test_basic(self):
        """Test that a datetime object is returned of the expected value."""
        cycletime = "20171122T0100Z"
        dt = datetime.datetime(2017, 11, 22, 1, 0)
        result = cycletime_to_datetime(cycletime)
        self.assertIsInstance(result, datetime.datetime)
        self.assertEqual(result, dt)
Example #5
def add_blend_time(cube: Cube, cycletime: str) -> None:
    """
    Function to add scalar blend time coordinate to a blended cube based
    on current cycle time.  Modifies cube in place.

    Args:
        cube:
            Cube to which the blend time coordinate will be added
        cycletime:
            Required blend time in a YYYYMMDDTHHMMZ format e.g. 20171122T0100Z
    """
    cycle_datetime = cycletime_to_datetime(cycletime)
    blend_coord = _create_frt_type_coord(cube, cycle_datetime, name="blend_time")
    cube.add_aux_coord(blend_coord, data_dims=None)
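A minimal sketch of the scalar coordinate this function attaches, built directly with iris rather than the private _create_frt_type_coord helper; the unit string and int64 epoch-seconds convention are assumptions based on how IMPROVER time coordinates usually look:

from datetime import datetime, timezone

import numpy as np
from iris.coords import AuxCoord

cycle_datetime = datetime(2017, 11, 22, 1, 0, tzinfo=timezone.utc)
blend_time = AuxCoord(
    np.array(cycle_datetime.timestamp(), dtype=np.int64),
    long_name="blend_time",
    units="seconds since 1970-01-01 00:00:00 UTC",
)
# cube.add_aux_coord(blend_time)  # scalar coordinate, so no data_dims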
Example #6
    def process(self, cubelist):
        """
        Take an input cubelist containing forecasts from different cycles and
        merge them into a single cube.

        The steps taken are:
            1. If no cycletime is given then find the latest cycle time from
               the input cubes.
            2. Update the forecast periods in each input cube to be relative
               to the new cycletime.
            3. Check for duplicate realization numbers. If any duplicates
               are found, renumber all of the realizations to remove them.
            4. Merge cubes into one cube, removing any metadata that
               doesn't match.
        """
        if self.cycletime is None:
            cycletime = find_latest_cycletime(cubelist)
        else:
            cycletime = cycletime_to_datetime(self.cycletime)
        cubelist = unify_forecast_reference_time(cubelist, cycletime)

        # Take all the realizations from all the input cubes and
        # put them in one array
        all_realizations = [
            cube.coord("realization").points for cube in cubelist
        ]
        all_realizations = np.concatenate(all_realizations)
        # Find unique realizations
        unique_realizations = np.unique(all_realizations)

        # If we have fewer unique realizations than total realizations we have
        # duplicate realizations so we rebadge all realizations in the cubelist
        if len(unique_realizations) < len(all_realizations):
            first_realization = 0
            for cube in cubelist:
                n_realization = len(cube.coord("realization").points)
                cube.coord("realization").points = np.arange(
                    first_realization, first_realization + n_realization)
                first_realization = first_realization + n_realization

        # slice over realization to deal with cases where direct concatenation
        # would result in a non-monotonic coordinate
        lagged_ensemble = concatenate_cubes(
            cubelist,
            master_coord="realization",
            coords_to_slice_over=["realization"])

        return lagged_ensemble
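The renumbering step can be hard to visualise; below is a self-contained numpy sketch of the duplicate check and rebadging, using plain arrays in place of cube coordinates:

import numpy as np

# Realization points from two hypothetical input cubes (duplicated):
realization_sets = [np.array([0, 1, 2]), np.array([0, 1, 2])]
all_realizations = np.concatenate(realization_sets)

if len(np.unique(all_realizations)) < len(all_realizations):
    first_realization = 0
    renumbered = []
    for points in realization_sets:
        n_realization = len(points)
        renumbered.append(np.arange(
            first_realization, first_realization + n_realization))
        first_realization += n_realization
    # renumbered == [array([0, 1, 2]), array([3, 4, 5])]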
Example #7
def rationalise_blend_time_coords(cubelist,
                                  blend_coord,
                                  cycletime=None,
                                  weighting_coord=None):
    """
    Updates time coordinates on unmerged input cubes before blending depending
    on the coordinate over which the blend will be performed.  Modifies cubes
    in place.

    If blend_coord is forecast_reference_time, ensures the cube does not have
    a forecast_period dimension.  If weighting_coord is forecast_period,
    equalises forecast_reference_time on each cube before blending.

    Args:
        cubelist (iris.cube.CubeList):
            List of cubes containing data to be blended
        blend_coord (str):
            Name of coordinate over which the blend will be performed
        cycletime (str or None):
            The cycletime in a YYYYMMDDTHHMMZ format e.g. 20171122T0100Z
        weighting_coord (str or None):
            The coordinate across which weights will be scaled in a
            multi-model blend.

    Raises:
        ValueError: if forecast_reference_time (to be unified) is a
            dimension coordinate
    """
    if "forecast_reference_time" in blend_coord:
        for cube in cubelist:
            coord_names = [x.name() for x in cube.coords()]
            if "forecast_period" in coord_names:
                cube.remove_coord("forecast_period")

    # if blending models using weights by forecast period, set forecast
    # reference times to current cycle time
    if ("model" in blend_coord and weighting_coord is not None
            and "forecast_period" in weighting_coord):
        if cycletime is None:
            cycletime = find_latest_cycletime(cubelist)
        else:
            cycletime = cycletime_to_datetime(cycletime)
        cubelist = unify_forecast_reference_time(cubelist, cycletime)
Example #8
def process(
    start_cube: cli.inputcube,
    end_cube: cli.inputcube,
    *,
    interval_in_mins: int = None,
    times: cli.comma_separated_list = None,
    interpolation_method="linear",
):
    """Interpolate data between validity times.

    Interpolate data to intermediate times between the validity times of two
    cubes. This can be used to fill in missing data (e.g. for radar fields)
    or to ensure data is available at the required intervals when model data
    is not available at these times.

    Args:
        start_cube (iris.cube.Cube):
            Cube containing the data at the beginning.
        end_cube (iris.cube.Cube):
            Cube containing the data at the end.
        interval_in_mins (int):
            Specifies the interval in minutes at which to interpolate between
            the two input cubes. A number of minutes which does not divide
            the interval equally will raise an exception. If interval_in_mins
            is set then times cannot be used.
        times (str):
            Specifies the times, in the format {YYYYMMDD}T{HHMM}Z, at which
            to interpolate between the two input cubes, where {YYYYMMDD} is
            year, month, day and {HHMM} is hour and minutes,
            e.g. 20180116T0100Z. More than one time can be provided,
            separated by a comma. If times is set, interval_in_mins cannot
            be used.
        interpolation_method (str):
            ["linear", "solar", "daynight"]
            Specifies the interpolation method: "solar" interpolates using
            the solar elevation, "daynight" uses linear interpolation but
            sets night-time points to 0.0, and "linear" is simple linear
            interpolation.

    Returns:
        iris.cube.CubeList:
            A list of cubes interpolated to the desired times. The
            interpolated cubes will always be in chronological order of
            earliest to latest regardless of the order of the input.
    """
    from improver.utilities.cube_manipulation import MergeCubes
    from improver.utilities.temporal import cycletime_to_datetime, iris_time_to_datetime
    from improver.utilities.temporal_interpolation import TemporalInterpolation

    (time_start,) = iris_time_to_datetime(start_cube.coord("time"))
    (time_end,) = iris_time_to_datetime(end_cube.coord("time"))
    if time_end < time_start:
        # swap cubes
        start_cube, end_cube = end_cube, start_cube

    if times is not None:
        times = [cycletime_to_datetime(timestr) for timestr in times]

    result = TemporalInterpolation(
        interval_in_minutes=interval_in_mins,
        times=times,
        interpolation_method=interpolation_method,
    )(start_cube, end_cube)
    return MergeCubes()(result)
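The docstring says an interval that does not divide the period equally raises an exception; here is a standalone sketch of that check and of generating the intermediate times (the real TemporalInterpolation validation may differ in detail):

from datetime import datetime, timedelta

start = datetime(2018, 1, 16, 0, 0)
end = datetime(2018, 1, 16, 3, 0)
interval_in_mins = 60

period = end - start
if period % timedelta(minutes=interval_in_mins):
    raise ValueError("interval_in_mins does not divide the period equally")

# Intermediate validity times, excluding the two endpoints:
times = [start + timedelta(minutes=m)
         for m in range(interval_in_mins,
                        int(period.total_seconds() // 60),
                        interval_in_mins)]
# times == [datetime(2018, 1, 16, 1, 0), datetime(2018, 1, 16, 2, 0)]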
Example #9
def process(
    mandatory_attributes_json: cli.inputjson,
    *,
    name="air_pressure_at_sea_level",
    units=None,
    spatial_grid="latlon",
    time_period: int = None,
    json_input: cli.inputjson = None,
    ensemble_members: int = 8,
    grid_spacing: float = None,
    domain_corner: cli.comma_separated_list_of_float = None,
    npoints: int = 71,
):
    """ Generate a cube with metadata only.

    Args:
        mandatory_attributes_json (Dict):
            Specifies the values of the mandatory attributes, title, institution and
            source.
        name (Optional[str]):
            Output variable name, or if creating a probability cube the name of the
            underlying variable to which the probability field applies.
        units (Optional[str]):
            Output variable units, or if creating a probability cube the units of the
            underlying variable / threshold.
        spatial_grid (Optional[str]):
            What type of x/y coordinate values to use.  Permitted values are
            "latlon" or "equalarea".
        time_period (Optional[int]):
            The period in minutes between the time bounds. This is used to calculate
            the lower time bound. If unset the diagnostic will be instantaneous, i.e.
            without time bounds.
        json_input (Optional[Dict]):
            Dictionary containing values for one or more of: "name", "units", "time",
            "time_bounds", "frt", "spp__relative_to_threshold", "attributes"
            (dictionary of additional metadata attributes) and "coords" (dictionary).
            "coords" can contain "height_levels" (list of height/pressure level values),
            and one of "realizations", "percentiles" or "thresholds" (list of dimension
            values).
        ensemble_members (Optional[int]):
            Number of ensemble members. Default 8. Will not be used if "realizations",
            "percentiles" or "thresholds" provided in json_input.
        grid_spacing (Optional[float]):
            Resolution of grid (metres or degrees).
        domain_corner (Optional[Tuple[float, float]]):
            Bottom left corner of grid domain (y,x) (degrees for latlon or metres for
            equalarea).
        npoints (Optional[int]):
            Number of points along each of the y and x spatial axes.

    Returns:
        iris.cube.Cube:
            Output of generate_metadata()
    """
    # Set arguments to pass to generate_metadata function and remove json_input for
    # processing contents before adding
    generate_metadata_args = locals()
    for key in ["mandatory_attributes_json", "json_input"]:
        generate_metadata_args.pop(key, None)

    from improver.synthetic_data.generate_metadata import generate_metadata
    from improver.synthetic_data.utilities import (
        get_height_levels,
        get_leading_dimension,
    )
    from improver.utilities.temporal import cycletime_to_datetime

    if json_input is not None:
        # Get leading dimension and height/pressure data from json_input
        if "coords" in json_input:
            coord_data = json_input["coords"]

            (
                json_input["leading_dimension"],
                json_input["cube_type"],
            ) = get_leading_dimension(coord_data)
            json_input["height_levels"], json_input["pressure"] = get_height_levels(
                coord_data
            )

            json_input.pop("coords", None)

        # Convert str time, frt and time_bounds to datetime
        if "time" in json_input:
            json_input["time"] = cycletime_to_datetime(json_input["time"])

        if "frt" in json_input:
            json_input["frt"] = cycletime_to_datetime(json_input["frt"])

        if "time_bounds" in json_input:
            time_bounds = []
            for tb in json_input["time_bounds"]:
                time_bounds.append(cycletime_to_datetime(tb))
            json_input["time_bounds"] = time_bounds

        # Update generate_metadata_args with the json_input data
        generate_metadata_args.update(json_input)
    return generate_metadata(mandatory_attributes_json, **generate_metadata_args)
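An illustrative json_input payload for this CLI; the keys come from the docstring above, while the particular values are invented for the example:

json_input = {
    "name": "air_temperature",
    "units": "K",
    "time": "20171122T0400Z",
    "frt": "20171122T0100Z",
    "time_bounds": ["20171122T0300Z", "20171122T0400Z"],
    "coords": {"realizations": [0, 1, 2]},
}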
Example #10
def process(cube_0,
            cube_1,
            interval_in_mins=None,
            in_times=None,
            interpolation_method='linear'):
    """Module to interpolate data between validity times.

    Interpolate data to intermediate times between the validity times of two
    cubes. This can be used to fill in missing data (e.g. for radar fields)
    or to ensure data is available at the required intervals when model data
    is not available at these times.

    Args:
        cube_0 (iris.cube.Cube):
            Cube containing the data at the beginning.
        cube_1 (iris.cube.Cube):
            Cube containing the data at the end.
        interval_in_mins (int):
            Specifies the interval in minutes at which to interpolate between
            the two input cubes. A number of minutes which does not divide
            the interval equally will raise an exception. If interval_in_mins
            is set then in_times cannot be used. Default is None.
        in_times (str):
            Specifies the times, in the format {YYYYMMDD}T{HHMM}Z, at which
            to interpolate between the two input cubes, where {YYYYMMDD} is
            year, month, day and {HHMM} is hour and minutes,
            e.g. 20180116T0100Z. More than one time can be provided,
            separated by a space. If in_times is set, interval_in_mins
            cannot be used. Default is None.
        interpolation_method (str):
            ["linear", "solar", "daynight"]
            Specifies the interpolation method: "solar" interpolates using
            the solar elevation, "daynight" uses linear interpolation but
            sets night-time points to 0.0, and "linear" is simple linear
            interpolation. Default is linear.

    Returns:
        iris.cube.CubeList:
            A list of cubes interpolated to the desired times. The
            interpolated cubes will always be in chronological order of
            earliest to latest regardless of the order of the input.
    """
    time_0, = iris_time_to_datetime(cube_0.coord('time'))
    time_1, = iris_time_to_datetime(cube_1.coord('time'))
    if time_0 < time_1:
        cube_start = cube_0
        cube_end = cube_1
    else:
        cube_start = cube_1
        cube_end = cube_0

    interval = interval_in_mins
    method = interpolation_method
    times = None
    if in_times is not None:
        times = [cycletime_to_datetime(timestr) for timestr in in_times]

    result = (TemporalInterpolation(interval_in_minutes=interval,
                                    times=times,
                                    interpolation_method=method).process(
                                        cube_start, cube_end))
    return result
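The swap at the top of the function guarantees chronological output regardless of argument order; a minimal stand-in using plain datetimes:

from datetime import datetime

# Validity times of the two inputs, deliberately out of order:
time_0 = datetime(2018, 1, 16, 3, 0)
time_1 = datetime(2018, 1, 16, 0, 0)

start, end = (time_0, time_1) if time_0 < time_1 else (time_1, time_0)
# start == 00:00 and end == 03:00, whichever cube came first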
Example #11
    def create_coefficients_cube(
            self, optimised_coeffs, historic_forecast):
        """Create a cube for storing the coefficients computed using EMOS.

        .. See the documentation for examples of these cubes.
        .. include:: extended_documentation/ensemble_calibration/
           ensemble_calibration/create_coefficients_cube.rst

        Args:
            optimised_coeffs (list):
                List of optimised coefficients.
                Order of coefficients is [gamma, delta, alpha, beta].
            historic_forecast (iris.cube.Cube):
                The cube containing the historic forecast.

        Returns:
            cube (iris.cube.Cube):
                Cube constructed using the coefficients provided and using
                metadata from the historic_forecast cube.  The cube contains
                a coefficient_index dimension coordinate where the points
                of the coordinate are integer values and a
                coefficient_name auxiliary coordinate where the points of
                the coordinate are e.g. gamma, delta, alpha, beta.

        """
        if self.predictor_of_mean_flag.lower() == "realizations":
            realization_coeffs = []
            for realization in historic_forecast.coord("realization").points:
                realization_coeffs.append(
                    "{}{}".format(self.coeff_names[-1], np.int32(realization)))
            coeff_names = self.coeff_names[:-1] + realization_coeffs
        else:
            coeff_names = self.coeff_names

        if len(optimised_coeffs) != len(coeff_names):
            msg = ("The number of coefficients in {} must equal the "
                   "number of coefficient names {}.".format(
                        optimised_coeffs, coeff_names))
            raise ValueError(msg)

        coefficient_index = iris.coords.DimCoord(
            np.arange(len(optimised_coeffs), dtype=np.int32),
            long_name="coefficient_index", units="1")
        coefficient_name = iris.coords.AuxCoord(
            coeff_names, long_name="coefficient_name", units="no_unit")
        dim_coords_and_dims = [(coefficient_index, 0)]
        aux_coords_and_dims = [(coefficient_name, 0)]

        # Create a forecast_reference_time coordinate.
        frt_point = cycletime_to_datetime(self.current_cycle)
        try:
            frt_coord = (
                historic_forecast.coord("forecast_reference_time").copy(
                    datetime_to_iris_time(frt_point, time_units="seconds")))
        except CoordinateNotFoundError:
            pass
        else:
            aux_coords_and_dims.append((frt_coord, None))

        # Create forecast period and time coordinates.
        try:
            fp_point = (
                np.unique(historic_forecast.coord("forecast_period").points))
            fp_coord = (
                historic_forecast.coord("forecast_period").copy(fp_point))
        except CoordinateNotFoundError:
            pass
        else:
            aux_coords_and_dims.append((fp_coord, None))
            if historic_forecast.coords("time"):
                # Ensure that the fp_point is determined with units of seconds.
                copy_of_fp_coord = (
                    historic_forecast.coord("forecast_period").copy())
                copy_of_fp_coord.convert_units("seconds")
                fp_point, = np.unique(copy_of_fp_coord.points)
                time_point = (
                    frt_point + datetime.timedelta(seconds=float(fp_point)))
                time_point = datetime_to_iris_time(
                    time_point,
                    time_units=str(historic_forecast.coord("time").units))
                time_coord = historic_forecast.coord("time").copy(time_point)
                aux_coords_and_dims.append((time_coord, None))

        attributes = {"diagnostic_standard_name": historic_forecast.name()}
        for attribute in historic_forecast.attributes.keys():
            if attribute.endswith("model_configuration"):
                attributes[attribute] = (
                    historic_forecast.attributes[attribute])

        cube = iris.cube.Cube(
            optimised_coeffs, long_name="emos_coefficients", units="1",
            dim_coords_and_dims=dim_coords_and_dims,
            aux_coords_and_dims=aux_coords_and_dims, attributes=attributes)
        return cube
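A minimal sketch of the coordinate skeleton this method builds, runnable with just iris and numpy; the default coefficient names follow the docstring, and the data values are placeholders:

import numpy as np
from iris.coords import AuxCoord, DimCoord
from iris.cube import Cube

coeff_names = ["gamma", "delta", "alpha", "beta"]
coefficient_index = DimCoord(
    np.arange(len(coeff_names), dtype=np.int32),
    long_name="coefficient_index", units="1")
coefficient_name = AuxCoord(
    coeff_names, long_name="coefficient_name", units="no_unit")

cube = Cube(
    np.zeros(len(coeff_names), dtype=np.float32),
    long_name="emos_coefficients", units="1",
    dim_coords_and_dims=[(coefficient_index, 0)],
    aux_coords_and_dims=[(coefficient_name, 0)])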
Example #12
def main(argv=None):
    """
    Interpolate data to intermediate times between the validity times of two
    cubes. This can be used to fill in missing data (e.g. for radar fields) or
    to ensure data is available at the required intervals when model data is
    not available at these times.
    """
    parser = ArgParser(description='Interpolate data between validity times')

    parser.add_argument('infiles',
                        metavar='INFILES',
                        nargs=2,
                        help='Files containing the data at the beginning'
                        ' and end of the period (2 files required).')

    group = parser.add_mutually_exclusive_group(required=True)

    group.add_argument("--interval_in_mins",
                       metavar="INTERVAL_IN_MINS",
                       default=None,
                       type=int,
                       help="Specifies the interval in minutes"
                       " at which to interpolate "
                       "between the two input cubes."
                       " A number of minutes which does not "
                       "divide up the interval equally will "
                       "raise an exception. If intervals_in_mins "
                       "is set then times can not be set.")

    group.add_argument("--times",
                       metavar="TIMES",
                       default=None,
                       nargs="+",
                       type=str,
                       help="Specifies the times in the format "
                       "{YYYYMMDD}T{HHMM}Z "
                       " at which to interpolate "
                       "between the two input cubes."
                       "Where {YYYYMMDD} is year, month day "
                       "and {HHMM} is hour and minutes e.g "
                       "20180116T0100Z. More than one time"
                       "can be provided separated by a space "
                       "but if times are set interval_in_mins "
                       "can not be set")

    parser.add_argument("--interpolation_method",
                        metavar="INTERPOLATION_METHOD",
                        default="linear",
                        choices=["linear", "solar", "daynight"],
                        help="Specifies the interpolation method; "
                        "solar interpolates using the solar elevation, "
                        "daynight uses linear interpolation but sets"
                        " night time points to 0.0, "
                        "linear is linear interpolation. "
                        "Default is linear.")

    parser.add_argument("--output_files",
                        metavar="OUTPUT_FILES",
                        required=True,
                        nargs="+",
                        help="List of output files."
                        " The interpolated files will always be"
                        " in the chronological order of"
                        " earliest to latest "
                        " regardless of the order of the infiles.")

    args = parser.parse_args(args=argv)

    cube_0 = load_cube(args.infiles[0])
    cube_1 = load_cube(args.infiles[1])
    time_0, = iris_time_to_datetime(cube_0.coord('time'))
    time_1, = iris_time_to_datetime(cube_1.coord('time'))
    if time_0 < time_1:
        cube_start = cube_0
        cube_end = cube_1
    else:
        cube_start = cube_1
        cube_end = cube_0

    interval = args.interval_in_mins
    method = args.interpolation_method
    times = None
    if args.times is not None:
        times = [cycletime_to_datetime(timestr) for timestr in args.times]

    interpolated_cubes = (TemporalInterpolation(
        interval_in_minutes=interval, times=times,
        interpolation_method=method).process(cube_start, cube_end))

    len_files = len(args.output_files)
    len_cubes = len(interpolated_cubes)
    if len_files == len_cubes:
        for i, cube_out in enumerate(interpolated_cubes):
            save_netcdf(cube_out, args.output_files[i])
    else:
        msg = ("Output_files do not match cubes created. "
               "{} files given but {} required.".format(len_files, len_cubes))
        raise ValueError(msg)
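A hypothetical invocation of this CLI (the wrapper script name is an assumption; the flags and argument counts come from the argparse definitions above):

# python temporal_interpolation_cli.py in_0000Z.nc in_0300Z.nc \
#     --interval_in_mins 60 \
#     --output_files out_0100Z.nc out_0200Z.nc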