Example #1
def calculate_area(features, mask, method_area=None):
    from tobac.utils import mask_features_surface, mask_features
    from iris import Constraint
    from iris.analysis.cartography import area_weights

    features["area"] = np.nan

    mask_coords = [coord.name() for coord in mask.coords()]
    if method_area is None:
        if ("projection_x_coordinate" in mask_coords) and (
            "projection_y_coordinate" in mask_coords
        ):
            method_area = "xy"
        elif ("latitude" in mask_coords) and ("longitude" in mask_coords):
            method_area = "latlon"
        else:
            raise ValueError(
                "either latitude/longitude or projection_x_coordinate/projection_y_coordinate have to be present to calculate distances"
            )
    logging.debug("calculating area using method " + method_area)
    if method_area == "xy":
        if not (
            mask.coord("projection_x_coordinate").has_bounds()
            and mask.coord("projection_y_coordinate").has_bounds()
        ):
            mask.coord("projection_x_coordinate").guess_bounds()
            mask.coord("projection_y_coordinate").guess_bounds()
        area = np.outer(
            np.diff(mask.coord("projection_x_coordinate").bounds, axis=1),
            np.diff(mask.coord("projection_y_coordinate").bounds, axis=1),
        )
    elif method_area == "latlon":
        if (mask.coord("latitude").ndim == 1) and (mask.coord("longitude").ndim == 1):
            if not (
                mask.coord("latitude").has_bounds()
                and mask.coord("longitude").has_bounds()
            ):
                mask.coord("latitude").guess_bounds()
                mask.coord("longitude").guess_bounds()
            area = area_weights(mask, normalize=False)
        elif mask.coord("latitude").ndim == 2 and mask.coord("longitude").ndim == 2:
            raise ValueError("2D latitude/longitude coordinates not supported yet")
            # area=calculate_areas_2Dlatlon(mask.coord('latitude'),mask.coord('longitude'))
        else:
            raise ValueError("latitude/longitude coordinate shape not supported")
    else:
        raise ValueError("method undefined")

    for time_i, features_i in features.groupby("time"):
        logging.debug("timestep:" + str(time_i))
        constraint_time = Constraint(time=time_i)
        mask_i = mask.extract(constraint_time)
        for i in features_i.index:
            if len(mask_i.shape) == 3:
                mask_i_surface = mask_features_surface(
                    mask_i, features_i.loc[i, "feature"], z_coord="model_level_number"
                )
            elif len(mask_i.shape) == 2:
                mask_i_surface = mask_features(mask_i, features_i.loc[i, "feature"])
            area_feature = np.sum(area * (mask_i_surface.data > 0))
            features.at[i, "area"] = area_feature
    return features
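
The "xy" branch above derives each grid-cell area from the coordinate bounds: the per-cell widths along x and y come from np.diff on the bounds, and their outer product gives one area per cell. A minimal, self-contained sketch of that calculation with made-up 1000 m spacing (not a real iris cube):

import numpy as np

# Hypothetical 1D projection-coordinate bounds with regular 1000 m spacing.
x_bounds = np.array([[0.0, 1000.0], [1000.0, 2000.0], [2000.0, 3000.0]])  # m
y_bounds = np.array([[0.0, 1000.0], [1000.0, 2000.0]])  # m

dx = np.diff(x_bounds, axis=1)  # cell widths along x, shape (3, 1)
dy = np.diff(y_bounds, axis=1)  # cell widths along y, shape (2, 1)

# Outer product: one area value per (x, y) grid cell, here 1e6 m^2 each.
area = np.outer(dx, dy)
print(area.shape)  # (3, 2)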
Example #2
def parse_constraint_list(
        constraints: List[str],
        units: Optional[List[str]] = None
) -> Tuple[Constraint, Optional[Dict]]:
    """
    For simple constraints of a key=value format, these are passed in as a
    list of strings and converted to key-value pairs prior to creating the
    constraints.
    For more complex constraints, the list of strings given as input
    are evaluated by parsing for specific identifiers and then the constraints
    are created as required.
    The simple key-value pairs and other constraints are merged into a single
    constraint.

    Args:
        constraints:
            List of string constraints with keys and values split by "=":
            e.g: ["kw1=val1", "kw2 = val2", "kw3=val3"].
        units:
            List of units (as strings) corresponding to each coordinate in the
            list of constraints.  One or more "units" may be None, and units
            may only be associated with coordinate constraints.

    Returns:
        - A combination of all the constraints that were supplied.
        - A dictionary of unit keys and values
    """

    if units is None:
        list_units = len(constraints) * [None]
        units_dict = None
    else:
        if len(units) != len(constraints):
            msg = "units list must match constraints"
            raise ValueError(msg)
        list_units = units
        units_dict = {}

    simple_constraints_dict = {}
    complex_constraints = []
    for constraint_pair, unit_val in zip(constraints, list_units):
        key, value = constraint_pair.split("=", 1)
        key = key.strip(" ")
        value = value.strip(" ")

        if is_complex_parsing_required(value):
            complex_constraints.append(create_range_constraint(key, value))
        else:
            try:
                typed_value = literal_eval(value)
            except ValueError:
                simple_constraints_dict[key] = value
            else:
                simple_constraints_dict[key] = create_constraint(typed_value)

        if unit_val is not None and unit_val.capitalize() != "None":
            units_dict[key] = unit_val.strip(" ")

    if simple_constraints_dict:
        simple_constraints = Constraint(**simple_constraints_dict)
    else:
        simple_constraints = None

    constraints = simple_constraints
    for constr in complex_constraints:
        constraints = constraints & constr

    return constraints, units_dict
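
As the docstring says, all simple key=value pairs end up as keyword arguments to one iris.Constraint and the unit strings are collected into a dictionary keyed by coordinate name. A hedged sketch of what the merged result looks like for two simple constraints (the coordinate names and file name are assumptions, and building the constraint directly bypasses the create_constraint helper used above):

from iris import Constraint

# Roughly what parse_constraint_list(["realization=0", "percentile = 50"],
#                                     units=["1", "%"]) would build:
merged_constraint = Constraint(realization=0, percentile=50)
units_dict = {"realization": "1", "percentile": "%"}

# cube = iris.load_cube("input.nc", merged_constraint)  # hypothetical file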
Example #3
def extract_cell_cubes_subset(cubelist_in,
                              mask,
                              track,
                              cell,
                              z_coord='model_level_number',
                              height_levels=None):

    from iris.analysis import SUM
    from iris import Constraint
    from iris.cube import CubeList
    from iris.coords import AuxCoord
    import numpy as np
    from tobac import mask_cell, mask_cell_surface, get_bounding_box
    from copy import deepcopy

    track_i = track[track['cell'] == cell]

    cubelist_cell_integrated_out = CubeList()
    cubelist_cell_sum = CubeList()

    for time_i in track_i['time'].values:

        logging.debug('start extracting cubes for cell ' + str(cell) +
                      ' and time ' + str(time_i))

        constraint_time = Constraint(time=time_i)
        mask_i = mask.extract(constraint_time)
        mask_cell_i = mask_cell(mask_i, cell, track_i, masked=False)
        mask_cell_surface_i = mask_cell_surface(mask_i,
                                                cell,
                                                track_i,
                                                masked=False,
                                                z_coord=z_coord)

        x_dim = mask_cell_surface_i.coord_dims('projection_x_coordinate')[0]
        y_dim = mask_cell_surface_i.coord_dims('projection_y_coordinate')[0]
        x_coord = mask_cell_surface_i.coord('projection_x_coordinate')
        y_coord = mask_cell_surface_i.coord('projection_y_coordinate')

        if (mask_cell_surface_i.core_data() > 0).any():
            box_mask_i = get_bounding_box(mask_cell_surface_i.core_data(),
                                          buffer=1)

            box_mask = [[
                x_coord.points[box_mask_i[x_dim][0]],
                x_coord.points[box_mask_i[x_dim][1]]
            ],
                        [
                            y_coord.points[box_mask_i[y_dim][0]],
                            y_coord.points[box_mask_i[y_dim][1]]
                        ]]
        else:
            box_mask = [[np.nan, np.nan], [np.nan, np.nan]]

        width = 20
        dx = 500
        x = track_i[track_i['time'].values ==
                    time_i]['projection_x_coordinate'].values[0]
        y = track_i[track_i['time'].values ==
                    time_i]['projection_y_coordinate'].values[0]

        n_add_width = 2

        box_slice = [[
            x - (width + n_add_width) * dx, x + (width + n_add_width) * dx
        ], [y - (width + n_add_width) * dx, y + (width + n_add_width) * dx]]

        x_min = np.nanmin([box_mask[0][0], box_slice[0][0]])
        x_max = np.nanmax([box_mask[0][1], box_slice[0][1]])
        y_min = np.nanmin([box_mask[1][0], box_slice[1][0]])
        y_max = np.nanmax([box_mask[1][1], box_slice[1][1]])

        constraint_x = Constraint(projection_x_coordinate=lambda cell: int(
            x_min) < cell < int(x_max))
        constraint_y = Constraint(projection_y_coordinate=lambda cell: int(
            y_min) < cell < int(y_max))

        constraint = constraint_time & constraint_x & constraint_y

        mask_cell_i = mask_cell_i.extract(constraint_x & constraint_y)
        mask_cell_surface_i = mask_cell_surface_i.extract(constraint_x
                                                          & constraint_y)

        cubelist_i = cubelist_in.extract(constraint)

        cubelist_cell_sum.extend(
            sum_profile_mask(cubelist_i, height_levels, mask_cell_i))
    cubelist_cell_sum_out = cubelist_cell_sum.merge()
    for cube in cubelist_cell_sum_out:
        cell_time_coord = AuxCoord(
            track_i['time_cell'].dt.total_seconds().values,
            units='s',
            long_name='time_cell')
        cube.add_aux_coord(cell_time_coord, cube.coord_dims('time')[0])

    for cube in cubelist_cell_sum_out:
        cubelist_cell_integrated_out.append(
            cube.collapsed(('geopotential_height'), SUM))

    track_cell_integrated = deepcopy(track_i)
    #
    for cube in cubelist_cell_integrated_out:
        track_cell_integrated[cube.name()] = cube.core_data()

    return cubelist_cell_sum_out, cubelist_cell_integrated_out, track_cell_integrated
Example #4
    def setUp(self):
        """
        Create a cube containing a regular lat-lon grid.

        Data is striped horizontally,
        e.g.
              1 1 1 1 1 1
              1 1 1 1 1 1
              2 2 2 2 2 2
              2 2 2 2 2 2
              3 3 3 3 3 3
              3 3 3 3 3 3
        """
        data = np.ones((12, 12))
        data[0:4, :] = 1
        data[4:8, :] = 2
        data[8:, :] = 3

        latitudes = np.linspace(-90, 90, 12)
        longitudes = np.linspace(-180, 180, 12)
        latitude = DimCoord(latitudes,
                            standard_name='latitude',
                            units='degrees',
                            coord_system=GeogCS(6371229.0))
        longitude = DimCoord(longitudes,
                             standard_name='longitude',
                             units='degrees',
                             coord_system=GeogCS(6371229.0),
                             circular=True)

        # Use time of 2017-02-17 06:00:00
        time = DimCoord([1487311200],
                        standard_name='time',
                        units=cf_units.Unit(
                            'seconds since 1970-01-01 00:00:00',
                            calendar='gregorian'))
        long_time_coord = DimCoord(range(1487311200, 1487397600, 3600),
                                   standard_name='time',
                                   units=cf_units.Unit(
                                       'seconds since 1970-01-01 00:00:00',
                                       calendar='gregorian'))

        time_dt = dt(2017, 2, 17, 6, 0)
        time_extract = Constraint(
            time=lambda cell: cell.point == PartialDateTime(
                time_dt.year, time_dt.month, time_dt.day, time_dt.hour))

        cube = Cube(data.reshape((1, 12, 12)),
                    long_name="air_temperature",
                    dim_coords_and_dims=[(time, 0), (latitude, 1),
                                         (longitude, 2)],
                    units="K")

        long_cube = Cube(np.arange(3456).reshape(24, 12, 12),
                         long_name="air_temperature",
                         dim_coords_and_dims=[(long_time_coord, 0),
                                              (latitude, 1), (longitude, 2)],
                         units="K")

        orography = Cube(np.ones((12, 12)),
                         long_name="surface_altitude",
                         dim_coords_and_dims=[(latitude, 0), (longitude, 1)],
                         units="m")

        # Western half of grid at altitude 0, eastern half at 10.
        # Note that the pressure_on_height_levels data is left unchanged,
        # so it is as if there is a sharp front running up the grid with
        # differing pressures on either side at equivalent heights above
        # the surface (e.g. east 1000hPa at 0m AMSL, west 1000hPa at 10m AMSL).
        # So there is higher pressure in the west.
        orography.data[0:10] = 0
        orography.data[10:] = 10
        ancillary_data = {}
        ancillary_data['orography'] = orography

        additional_data = {}
        adlist = CubeList()
        adlist.append(cube)
        additional_data['air_temperature'] = adlist

        data_indices = [list(data.nonzero()[0]), list(data.nonzero()[1])]

        self.cube = cube
        self.long_cube = long_cube
        self.data = data
        self.time_dt = time_dt
        self.time_extract = time_extract
        self.data_indices = data_indices
        self.ancillary_data = ancillary_data
        self.additional_data = additional_data
Example #5
def make_constraint(var_name="hgtprs"):
    return Constraint(cube_func=lambda cube: cube.var_name == var_name)
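
Because this constraint matches on cube metadata (var_name) rather than coordinate values, it is handy for pulling one variable out of a CubeList. A small hedged sketch using in-memory cubes (the var_name values are only illustrative):

import numpy as np
from iris.cube import Cube, CubeList

hgt = Cube(np.zeros((2, 2)), var_name="hgtprs")
tmp = Cube(np.zeros((2, 2)), var_name="tmpprs")

# Keep only the cube whose var_name matches the constraint.
selected = CubeList([hgt, tmp]).extract(make_constraint("hgtprs"))
print([cube.var_name for cube in selected])  # ['hgtprs']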
Example #6
def main(argv=None):
    """Extrapolate data forward in time."""

    parser = ArgParser(
        description="Extrapolate input data to required lead times.")
    parser.add_argument("input_filepath", metavar="INPUT_FILEPATH",
                        type=str, help="Path to input NetCDF file.")

    group = parser.add_mutually_exclusive_group()
    group.add_argument("--output_dir", metavar="OUTPUT_DIR", type=str,
                       default="", help="Directory to write output files.")
    group.add_argument("--output_filepaths", nargs="+", type=str,
                       help="List of full paths to output nowcast files, in "
                       "order of increasing lead time.")

    optflw = parser.add_argument_group('Advect using files containing the x '
                                       'and y components of the velocity')
    optflw.add_argument("--eastward_advection_filepath", type=str, help="Path"
                        " to input file containing Eastward advection "
                        "velocities.")
    optflw.add_argument("--northward_advection_filepath", type=str, help="Path"
                        " to input file containing Northward advection "
                        "velocities.")

    speed = parser.add_argument_group('Advect using files containing speed and'
                                      ' direction')
    speed.add_argument("--advection_speed_filepath", type=str, help="Path"
                       " to input file containing advection speeds,"
                       " usually wind speeds, on multiple pressure levels.")
    speed.add_argument("--advection_direction_filepath", type=str,
                       help="Path to input file containing the directions from"
                       " which advection speeds are coming (180 degrees from"
                       " the direction in which the speed is directed). The"
                       " directions should be on the same grid as the input"
                       " speeds, including the same vertical levels.")
    speed.add_argument("--pressure_level", type=int, default=75000, help="The"
                       " pressure level in Pa to extract from the multi-level"
                       " advection_speed and advection_direction files. The"
                       " velocities at this level are used for advection.")
    parser.add_argument("--orographic_enhancement_filepaths", nargs="+",
                        type=str, default=None, help="List or wildcarded "
                        "file specification to the input orographic "
                        "enhancement files. Orographic enhancement files are "
                        "compulsory for precipitation fields.")
    parser.add_argument("--json_file", metavar="JSON_FILE", default=None,
                        help="Filename for the json file containing "
                        "required changes to the metadata. Information "
                        "describing the intended contents of the json file "
                        "is available in "
                        "improver.utilities.cube_metadata.amend_metadata."
                        "Every output cube will have the metadata_dict "
                        "applied. Defaults to None.", type=str)
    parser.add_argument("--max_lead_time", type=int, default=360,
                        help="Maximum lead time required (mins).")
    parser.add_argument("--lead_time_interval", type=int, default=15,
                        help="Interval between required lead times (mins).")

    accumulation_args = parser.add_argument_group(
        'Calculate accumulations from advected fields')
    accumulation_args.add_argument(
        "--accumulation_fidelity", type=int, default=0,
        help="If set, this CLI will additionally return accumulations"
        " calculated from the advected fields. This fidelity specifies the"
        " time interval in minutes between advected fields that is used to"
        " calculate these accumulations. This interval must be a factor of"
        " the lead_time_interval.")
    accumulation_args.add_argument(
        "--accumulation_units", type=str, default='m',
        help="Desired units in which the accumulations should be expressed,"
        "e.g. mm")

    args = parser.parse_args(args=argv)

    upath, vpath = (args.eastward_advection_filepath,
                    args.northward_advection_filepath)
    spath, dpath = (args.advection_speed_filepath,
                    args.advection_direction_filepath)

    # load files and initialise advection plugin
    input_cube = load_cube(args.input_filepath)
    if (upath and vpath) and not (spath or dpath):
        ucube = load_cube(upath)
        vcube = load_cube(vpath)
    elif (spath and dpath) and not (upath or vpath):
        level_constraint = Constraint(pressure=args.pressure_level)
        try:
            scube = load_cube(spath, constraints=level_constraint)
            dcube = load_cube(dpath, constraints=level_constraint)
        except ValueError as err:
            raise ValueError(
                '{} Unable to extract specified pressure level from given '
                'speed and direction files.'.format(err))

        ucube, vcube = ResolveWindComponents().process(scube, dcube)
    else:
        raise ValueError('Cannot mix advection component velocities with speed'
                         ' and direction')

    oe_cube = None
    if args.orographic_enhancement_filepaths:
        oe_cube = load_cube(args.orographic_enhancement_filepaths)

    metadata_dict = None
    if args.json_file:
        # Load JSON file for metadata amendments.
        with open(args.json_file, 'r') as input_file:
            metadata_dict = json.load(input_file)

    # generate list of lead times in minutes
    lead_times = np.arange(0, args.max_lead_time+1,
                           args.lead_time_interval)

    if args.output_filepaths:
        if len(args.output_filepaths) != len(lead_times):
            raise ValueError("Require exactly one output file name for each "
                             "forecast lead time")

    # determine whether accumulations are also to be returned.
    time_interval = args.lead_time_interval
    if args.accumulation_fidelity > 0:
        fraction, _ = np.modf(args.lead_time_interval /
                              args.accumulation_fidelity)
        if fraction != 0:
            msg = ("The specified lead_time_interval ({}) is not cleanly "
                   "divisible by the specified accumulation_fidelity ({}). As "
                   "a result the lead_time_interval cannot be constructed from"
                   " accumulation cubes at this fidelity.".format(
                       args.lead_time_interval, args.accumulation_fidelity))
            raise ValueError(msg)

        time_interval = args.accumulation_fidelity
        lead_times = np.arange(0, args.max_lead_time+1, time_interval)

    lead_time_filter = args.lead_time_interval // time_interval

    forecast_plugin = CreateExtrapolationForecast(
        input_cube, ucube, vcube, orographic_enhancement_cube=oe_cube,
        metadata_dict=metadata_dict)

    # extrapolate input data to required lead times
    forecast_cubes = iris.cube.CubeList()
    for i, lead_time in enumerate(lead_times):
        forecast_cubes.append(
            forecast_plugin.extrapolate(leadtime_minutes=lead_time))

    # return rate cubes
    for i, cube in enumerate(forecast_cubes[::lead_time_filter]):
        # save to a suitably-named output file
        if args.output_filepaths:
            file_name = args.output_filepaths[i]
        else:
            file_name = os.path.join(
                args.output_dir, generate_file_name(cube))
        save_netcdf(cube, file_name)

    # calculate accumulations if required
    if args.accumulation_fidelity > 0:
        plugin = Accumulation(accumulation_units=args.accumulation_units,
                              accumulation_period=args.lead_time_interval * 60)
        accumulation_cubes = plugin.process(forecast_cubes)

        # return accumulation cubes
        for i, cube in enumerate(accumulation_cubes):
            file_name = os.path.join(args.output_dir, generate_file_name(cube))
            save_netcdf(cube, file_name)
Example #7
    def setUp(self):
        """Create a cube containing a regular lat-lon grid and other necessary
        ingredients for unit tests."""

        data = np.arange(0, 800, 1)
        data.resize(2, 20, 20)
        latitudes = np.linspace(-90, 90, 20)
        longitudes = np.linspace(-180, 180, 20)
        latitude = DimCoord(latitudes,
                            standard_name='latitude',
                            units='degrees',
                            var_name='latitude')
        longitude = DimCoord(longitudes,
                             standard_name='longitude',
                             units='degrees',
                             var_name='longitude')

        # Use time of 2017-02-17 06:00:00, 07:00:00
        time = DimCoord([1487311200, 1487314800],
                        standard_name='time',
                        units=cf_units.Unit(
                            'seconds since 1970-01-01 00:00:00',
                            calendar='gregorian'),
                        var_name='time')

        time_dt = dt(2017, 2, 17, 6, 0)
        time_extract = Constraint(time=PartialDateTime(
            time_dt.year, time_dt.month, time_dt.day, time_dt.hour))

        cube = Cube(data,
                    long_name="air_temperature",
                    dim_coords_and_dims=[(time, 0), (latitude, 1),
                                         (longitude, 2)],
                    units="K")
        cube2 = cube.copy()

        orography = Cube(np.ones((20, 20)),
                         long_name="surface_altitude",
                         dim_coords_and_dims=[(latitude, 0), (longitude, 1)],
                         units="m")

        land = orography.copy()
        land.rename('land_binary_mask')
        land.data = land.data + 1

        ancillary_data = {}
        ancillary_data.update({'orography': orography})
        ancillary_data.update({'land_mask': land})

        # Copies of cube simply renamed to be read as additional data.
        temperature_on_height_levels = cube.copy()
        temperature_on_height_levels.rename('temperature_on_height_levels')
        pressure_on_height_levels = cube.copy()
        pressure_on_height_levels.rename('pressure_on_height_levels')
        surface_pressure = cube.copy()
        surface_pressure.rename('surface_pressure')

        # Build reference copy of additional_data dictionary.
        with iris.FUTURE.context(cell_datetime_objects=True):
            additional_data = {
                'temperature_on_height_levels':
                CubeList([temperature_on_height_levels]),
                'pressure_on_height_levels':
                CubeList([pressure_on_height_levels]),
                'surface_pressure':
                CubeList([surface_pressure])
            }

        self.data_directory = mkdtemp()

        self.cube_file = (self.data_directory +
                          '/01-temperature_at_screen_level.nc')
        self.cube_file2 = (self.data_directory +
                           '/02-temperature_at_screen_level.nc')
        orography_file = self.data_directory + '/orography.nc'
        land_file = self.data_directory + '/land_mask.nc'
        ad_file_temperature = (self.data_directory +
                               '/temperature_on_height_levels.nc')
        ad_file_pressure = (self.data_directory +
                            '/pressure_on_height_levels.nc')
        ad_file_s_pressure = self.data_directory + '/surface_pressure.nc'

        iris.save(cube, self.cube_file)
        iris.save(cube2, self.cube_file2)
        iris.save(orography, orography_file)
        iris.save(land, land_file)
        iris.save(temperature_on_height_levels, ad_file_temperature)
        iris.save(pressure_on_height_levels, ad_file_pressure)
        iris.save(surface_pressure, ad_file_s_pressure)

        diagnostic_recipe = {
            "temperature": {
                "diagnostic_name": "air_temperature",
                "extrema": True,
                "filepath": "temperature_at_screen_level",
                "neighbour_finding": {
                    "land_constraint": False,
                    "method": "fast_nearest_neighbour",
                    "vertical_bias": None
                }
            }
        }

        self.config_file = self.data_directory + '/spotdata_diagnostics.json'
        ff = open(self.config_file, 'w')
        json.dump(diagnostic_recipe,
                  ff,
                  sort_keys=True,
                  indent=4,
                  separators=(
                      ',',
                      ': ',
                  ))
        ff.close()

        self.made_files = [
            self.cube_file, self.cube_file2, orography_file, land_file,
            ad_file_temperature, ad_file_pressure, ad_file_s_pressure,
            self.config_file
        ]

        self.cube = cube
        self.cube2 = cube2
        self.temperature_on_height_levels = temperature_on_height_levels
        self.ancillary_data = ancillary_data
        self.additional_data = additional_data
        self.time_extract = time_extract
Example #8
def generate_day_constraint_with_window(
        day_of_year, window, calendar='standard'):
    """
    generate two :class:`iris.Constraints <iris.Constraint>` for the time axis:
      1. for the exact day of the year over all years
      2. including all days over all years that lie within day_of_year +/- window
    Args:
    * day_of_year (int):
        day of the year in the given calendar
    * window (int):
        the size of the temporal window around the given day (in days)
    * calendar (str):
        a supported calendar: standard (default), gregorian,
        proleptic_gregorian, noleap, 365_day, all_leap, 366_day, 360_day
    Returns:
        a 2-tuple of :class:`iris.Constraints <iris.Constraint>`
        on the time axis
    """
    if calendar in ['standard', 'gregorian', 'proleptic_gregorian',
                    'all_leap', '366_day']:
        # take a leap year to generate bounds
        start = dt.datetime(2000, 1, 1)
        year_start = PartialDateTime(month=1, day=1)
        year_end = PartialDateTime(month=12, day=31)
        begin = start + dt.timedelta(days=day_of_year - window)
        mid = start + dt.timedelta(days=day_of_year)
        end = start + dt.timedelta(days=day_of_year + window)
        begin = PartialDateTime(month=begin.month, day=begin.day)
        mid = PartialDateTime(month=mid.month, day=mid.day)
        end = PartialDateTime(month=end.month, day=end.day)
    elif calendar in ['noleap', '365_day']:
        # take a non-leap year to generate bounds
        start = dt.datetime(1999, 1, 1)
        year_start = PartialDateTime(month=1, day=1)
        year_end = PartialDateTime(month=12, day=31)
        begin = start + dt.timedelta(days=day_of_year - window)
        mid = start + dt.timedelta(days=day_of_year)
        end = start + dt.timedelta(days=day_of_year + window)
        begin = PartialDateTime(month=begin.month, day=begin.day)
        mid = PartialDateTime(month=mid.month, day=mid.day)
        end = PartialDateTime(month=end.month, day=end.day)
    elif calendar in ['360_day']:
        # construct the bounds manually
        year_start = PartialDateTime(month=1, day=1)
        year_end = PartialDateTime(month=12, day=30)
        month, day_of_month = divmod(day_of_year, 30)
        # add one as month and day counting starts at 1
        month += 1
        day_of_month += 1
        mid = PartialDateTime(month=month, day=day_of_month)
        start_day = day_of_month - window
        end_day = day_of_month + window
        begin = PartialDateTime(month=month, day=start_day) \
            if start_day >= 1 else \
            PartialDateTime(month=(month - 2) % 12 + 1,
                            day=start_day + 30)
        end = PartialDateTime(month=month, day=end_day) if end_day <= 30 else \
            PartialDateTime(month=month % 12 + 1, day=end_day - 30)
    else:
        raise ValueError("calendar '{}' not supported".format(calendar))

    day_constraint = Constraint(time=lambda cell: cell.point == mid)
    if begin.month <= end.month:
        window_constraint = Constraint(
            time=lambda cell: begin <= cell.point <= end)
    else:
        window_constraint = Constraint(
            time=lambda cell: year_start <= cell.point <= end or
            begin <= cell.point <= year_end)

    return day_constraint, window_constraint
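
A hedged usage sketch: for day_of_year=45 with a 5-day window on the standard calendar, the first constraint picks out that calendar day in every year and the second picks out the surrounding 11-day window (the cube and file name below are assumptions):

day_constraint, window_constraint = generate_day_constraint_with_window(
    45, 5, calendar='standard')

# cube = iris.load_cube('tas_daily.nc')               # hypothetical file
# same_day_all_years = cube.extract(day_constraint)
# window_all_years = cube.extract(window_constraint)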
Example #9
def cell_statistics(input_cubes,track,mask,aggregators,cell,output_path='./',output_name='Profiles',width=10000,z_coord='model_level_number',dimensions=['x','y'],**kwargs):
    from iris.cube import Cube,CubeList
    from iris.coords import AuxCoord
    from iris import Constraint,save    
    
    # If input is single cube, turn into cubelist
    if type(input_cubes) is Cube:
        input_cubes=CubeList([input_cubes])
    
    logging.debug('Start calculating profiles for cell '+str(cell))
    track_i=track[track['cell']==cell]
    
    cubes_profile={}
    for aggregator in aggregators:
        cubes_profile[aggregator.name()]=CubeList()
        
    for time_i in track_i['time'].values:
        constraint_time = Constraint(time=time_i)
        
        mask_i=mask.extract(constraint_time)
        mask_cell_i=mask_cell(mask_i,cell,track_i,masked=False)
        mask_cell_surface_i=mask_cell_surface(mask_i,cell,track_i,masked=False,z_coord=z_coord)

        x_dim=mask_cell_surface_i.coord_dims('projection_x_coordinate')[0]
        y_dim=mask_cell_surface_i.coord_dims('projection_y_coordinate')[0]
        x_coord=mask_cell_surface_i.coord('projection_x_coordinate')
        y_coord=mask_cell_surface_i.coord('projection_y_coordinate')
    
        if (mask_cell_surface_i.core_data()>0).any():
            box_mask_i=get_bounding_box(mask_cell_surface_i.core_data(),buffer=1)
    
            box_mask=[[x_coord.points[box_mask_i[x_dim][0]],x_coord.points[box_mask_i[x_dim][1]]],
                     [y_coord.points[box_mask_i[y_dim][0]],y_coord.points[box_mask_i[y_dim][1]]]]
        else:
            box_mask=[[np.nan,np.nan],[np.nan,np.nan]]
    
        x=track_i[track_i['time'].values==time_i]['projection_x_coordinate'].values[0]
        y=track_i[track_i['time'].values==time_i]['projection_y_coordinate'].values[0]

        box_slice=[[x-width,x+width],[y-width,y+width]]
               
        x_min=np.nanmin([box_mask[0][0],box_slice[0][0]])
        x_max=np.nanmax([box_mask[0][1],box_slice[0][1]])
        y_min=np.nanmin([box_mask[1][0],box_slice[1][0]])
        y_max=np.nanmax([box_mask[1][1],box_slice[1][1]])

        constraint_x=Constraint(projection_x_coordinate=lambda cell: int(x_min) < cell < int(x_max))
        constraint_y=Constraint(projection_y_coordinate=lambda cell: int(y_min) < cell < int(y_max))

        constraint=constraint_time & constraint_x & constraint_y
#       Mask_cell_surface_i=mask_cell_surface(Mask_w_i,cell,masked=False,z_coord='model_level_number')
        mask_cell_i=mask_cell_i.extract(constraint)
        mask_cell_surface_i=mask_cell_surface_i.extract(constraint)

        input_cubes_i=input_cubes.extract(constraint)
        for cube in input_cubes_i:
            cube_masked=mask_cube_cell(cube,mask_cell_i,cell,track_i)
            coords_remove=[]
            for coordinate in cube_masked.coords(dim_coords=False):

                if coordinate.name() not in dimensions:
                    for dim in dimensions:
                        if set(cube_masked.coord_dims(coordinate)).intersection(set(cube_masked.coord_dims(dim))):
                            coords_remove.append(coordinate.name())
            for coordinate in set(coords_remove):
                cube_masked.remove_coord(coordinate)            
            
            for aggregator in aggregators:
                cube_collapsed=cube_masked.collapsed(dimensions,aggregator,**kwargs)
                #remove all collapsed coordinates (x and y dim, scalar now) and keep only time as all these coordinates are useless
                for coordinate in cube_collapsed.coords():
                    if not cube_collapsed.coord_dims(coordinate):
                        if coordinate.name() != 'time':
                            cube_collapsed.remove_coord(coordinate)
                logging.debug(str(cube_collapsed))
                cubes_profile[aggregator.name()].append(cube_collapsed)


    minutes=(track_i['time_cell']/pd.Timedelta(minutes=1)).values
    latitude=track_i['latitude'].values
    longitude=track_i['longitude'].values
    minutes_coord=AuxCoord(minutes,long_name='cell_time',units='min')
    latitude_coord=AuxCoord(latitude,long_name='latitude',units='degrees')
    longitude_coord=AuxCoord(longitude,long_name='longitude',units='degrees')
    
    for aggregator in aggregators:
        cubes_profile[aggregator.name()]=cubes_profile[aggregator.name()].merge()
        for cube in cubes_profile[aggregator.name()]:
            cube.add_aux_coord(minutes_coord,data_dims=cube.coord_dims('time'))
            cube.add_aux_coord(latitude_coord,data_dims=cube.coord_dims('time'))
            cube.add_aux_coord(longitude_coord,data_dims=cube.coord_dims('time'))
        os.makedirs(os.path.join(output_path,output_name,aggregator.name()),exist_ok=True)
        savefile=os.path.join(output_path,output_name,aggregator.name(),output_name+'_'+ aggregator.name()+'_'+str(int(cell))+'.nc')
        save(cubes_profile[aggregator.name()],savefile)
Example #10
def bbox_extract_1Dcoords(cube, bbox):
    lat = Constraint(latitude=lambda cell: bbox[1] <= cell < bbox[3])
    lon = Constraint(longitude=lambda cell: bbox[0] <= cell <= bbox[2])
    cube = cube.extract(lon & lat)
    return cube
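
Here bbox is ordered [lon_min, lat_min, lon_max, lat_max]. A hedged, self-contained sketch on a small synthetic grid (coordinate values chosen only for illustration):

import numpy as np
from iris.cube import Cube
from iris.coords import DimCoord

lat = DimCoord(np.linspace(-90, 90, 7), standard_name='latitude', units='degrees')
lon = DimCoord(np.linspace(-180, 180, 7), standard_name='longitude', units='degrees')
cube = Cube(np.zeros((7, 7)), dim_coords_and_dims=[(lat, 0), (lon, 1)])

# Subset to roughly 60W-0E, 20N-60N.
subset = bbox_extract_1Dcoords(cube, [-60.0, 20.0, 0.0, 60.0])
print(subset.coord('latitude').points, subset.coord('longitude').points)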
Example #11
    description="Converts the given file from sigma level to pressure level")
parser.add_argument("filename", type=str, help="File name of input")
args = parser.parse_args()

# Load surface pressure cube
try:
    pₛ = load_cube(args.filename, "surface_air_pressure")
except ConstraintMismatchError:
    print(f"Couldn't find surface pressure cube in file {args.filename}")
    print("Is it named correctly? (surface_air_pressure)")
    raise SystemExit

# Load other cubes
cubes = load(
    args.filename,
    Constraint(cube_func=lambda n: n.long_name != "surface_air_pressure"))
print("Processing the following cubes...")
print(cubes)

# Get sigma coordinates
σ_levels = cubes[0].coord("atmosphere_sigma_coordinate").points

print(f"Processing {args.filename} from sigma levels " +
      f"\n{', '.join(str(σ) for σ in σ_levels)}" +
      f"\nto pressure levels\n{', '.join(str(p) + ' hPa' for p in p_levels)}")

# Construct pressure level Iris DimCoord
pressure_level_coord = DimCoord(p_levels,
                                units=Unit("hPa"),
                                long_name="air_pressure",
                                var_name="lev")
Example #12
def time_constraint(cube, start, stop):
    """Slice time by constraint."""
    constraint = Constraint(time=lambda cell: start <= cell <= stop)
    return cube.extract(constraint)
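
A hedged usage sketch, assuming the cube's time coordinate yields datetime-comparable cells (the cube itself is loaded elsewhere):

import datetime

start = datetime.datetime(2017, 1, 1)
stop = datetime.datetime(2017, 2, 1)
# january = time_constraint(cube, start, stop)  # 'cube' loaded elsewhere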
Example #13
def plot(dirname, field, label_in):
    with catch_warnings():
        label = label_in

        # SPEEDY output is not CF compliant
        simplefilter('ignore', UserWarning)

        print(f'Plotting {field}')
        analy_ps = load_cube(f'{dirname}/mean.nc', field)
        nature_ps = load_cube('nature.nc', field)

        # Get minimum duration of data
        time = min(
            analy_ps.coord('time').points[-1],
            nature_ps.coord('time').points[-1])
        analy_ps = analy_ps.extract(Constraint(time=lambda t: t < time))
        nature_ps = nature_ps.extract(Constraint(time=lambda t: t < time))

        # Generate x date axis
        with FUTURE.context(cell_datetime_objects=True):
            time_axis = [x.point for x in nature_ps.coord('time').cells()]

        coords = ['latitude', 'longitude', 'atmosphere_sigma_coordinate']
        rmse = ((analy_ps - nature_ps)**2).collapsed(coords, MEAN)**0.5

        label = label_in + ' RMSE'

        # Try to compute time mean after March 1st 00:00 (if data doesn't go up
        # to March 1st yet, an AttributeError will be thrown - this is ignored)
        try:
            with FUTURE.context(cell_datetime_objects=True):
                after_march = rmse.extract(
                    Constraint(
                        time=lambda t: t > PartialDateTime(month=3, day=1)))
                mean = float(after_march.collapsed('time', MEAN).data)
            label += f' ({mean:{4}.{3}})'
        except AttributeError:
            pass

        rmse_h, = plt.plot(time_axis, rmse.data, label=label)

        analy_cb = load_cube(f'{dirname}/sprd.nc', field)
        analy_cb = analy_cb.extract(Constraint(time=lambda t: t < time))

        sprd = analy_cb.collapsed(coords, MEAN)

        label = label_in + ' spread'

        # Try to compute time mean after March 1st 00:00 (if data doesn't go up
        # to March 1st yet, an AttributeError will be thrown - this is ignored)
        try:
            with FUTURE.context(cell_datetime_objects=True):
                after_march = sprd.extract(
                    Constraint(
                        time=lambda t: t > PartialDateTime(month=3, day=1)))
                mean = float(after_march.collapsed('time', MEAN).data)
            label += f'  ({mean:{4}.{3}})'
        except AttributeError:
            pass

        sprd_h, = plt.plot(time_axis,
                           sprd.data,
                           linestyle='--',
                           label=label,
                           color=rmse_h.get_color())

        return [rmse_h, sprd_h]
Example #14
def main(argv=None):
    """Extrapolate data forward in time."""

    parser = ArgParser(
        description="Extrapolate input data to required lead times.")
    parser.add_argument("input_filepath",
                        metavar="INPUT_FILEPATH",
                        type=str,
                        help="Path to input NetCDF file.")

    group = parser.add_mutually_exclusive_group()
    group.add_argument("--output_dir",
                       metavar="OUTPUT_DIR",
                       type=str,
                       default="",
                       help="Directory to write output files.")
    group.add_argument("--output_filepaths",
                       nargs="+",
                       type=str,
                       help="List of full paths to output nowcast files, in "
                       "order of increasing lead time.")

    optflw = parser.add_argument_group('Advect using files containing the x '
                                       'and y components of the velocity')
    optflw.add_argument("--eastward_advection_filepath",
                        type=str,
                        help="Path"
                        " to input file containing Eastward advection "
                        "velocities.")
    optflw.add_argument("--northward_advection_filepath",
                        type=str,
                        help="Path"
                        " to input file containing Northward advection "
                        "velocities.")

    speed = parser.add_argument_group('Advect using files containing speed and'
                                      ' direction')
    speed.add_argument("--advection_speed_filepath",
                       type=str,
                       help="Path"
                       " to input file containing advection speeds,"
                       " usually wind speeds, on multiple pressure levels.")
    speed.add_argument("--advection_direction_filepath",
                       type=str,
                       help="Path to input file containing the directions from"
                       " which advection speeds are coming (180 degrees from"
                       " the direction in which the speed is directed). The"
                       " directions should be on the same grid as the input"
                       " speeds, including the same vertical levels.")
    speed.add_argument("--pressure_level",
                       type=int,
                       default=75000,
                       help="The"
                       " pressure level in Pa to extract from the multi-level"
                       " advection_speed and advection_direction files. The"
                       " velocities at this level are used for advection.")
    parser.add_argument("--orographic_enhancement_filepaths",
                        nargs="+",
                        type=str,
                        default=None,
                        help="List or wildcarded "
                        "file specification to the input orographic "
                        "enhancement files. Orographic enhancement files are "
                        "compulsory for precipitation fields.")
    parser.add_argument("--json_file",
                        metavar="JSON_FILE",
                        default=None,
                        help="Filename for the json file containing "
                        "required changes to the metadata. Information "
                        "describing the intended contents of the json file "
                        "is available in "
                        "improver.utilities.cube_metadata.amend_metadata."
                        "Every output cube will have the metadata_dict "
                        "applied. Defaults to None.",
                        type=str)
    parser.add_argument("--max_lead_time",
                        type=int,
                        default=360,
                        help="Maximum lead time required (mins).")
    parser.add_argument("--lead_time_interval",
                        type=int,
                        default=15,
                        help="Interval between required lead times (mins).")

    accumulation_args = parser.add_argument_group(
        'Calculate accumulations from advected fields')
    accumulation_args.add_argument(
        "--accumulation_fidelity",
        type=int,
        default=0,
        help="If set, this CLI will additionally return accumulations"
        " calculated from the advected fields. This fidelity specifies the"
        " time interval in minutes between advected fields that is used to"
        " calculate these accumulations. This interval must be a factor of"
        " the lead_time_interval.")
    accumulation_args.add_argument(
        "--accumulation_period",
        type=int,
        default=15,
        help="The period over which the accumulation is calculated (mins). "
        "Only full accumulation periods will be computed. At lead times "
        "that are shorter than the accumulation period, no accumulation "
        "output will be produced.")
    accumulation_args.add_argument(
        "--accumulation_units",
        type=str,
        default='m',
        help="Desired units in which the accumulations should be expressed,"
        "e.g. mm")

    # Load Cubes
    args = parser.parse_args(args=argv)

    metadata_dict = load_json_or_none(args.json_file)

    upath, vpath = (args.eastward_advection_filepath,
                    args.northward_advection_filepath)
    spath, dpath = (args.advection_speed_filepath,
                    args.advection_direction_filepath)

    # load files and initialise advection plugin
    input_cube = load_cube(args.input_filepath)
    orographic_enhancement_cube = load_cube(
        args.orographic_enhancement_filepaths, allow_none=True)

    speed_cube = direction_cube = ucube = vcube = None
    if (upath and vpath) and not (spath or dpath):
        ucube = load_cube(upath)
        vcube = load_cube(vpath)
    elif (spath and dpath) and not (upath or vpath):
        level_constraint = Constraint(pressure=args.pressure_level)
        try:
            speed_cube = load_cube(spath, constraints=level_constraint)
            direction_cube = load_cube(dpath, constraints=level_constraint)
        except ValueError as err:
            raise ValueError(
                '{} Unable to extract specified pressure level from given '
                'speed and direction files.'.format(err))
    else:
        raise ValueError('Cannot mix advection component velocities with speed'
                         ' and direction')

    # Process Cubes
    accumulation_cubes, forecast_to_return = process(
        input_cube, ucube, vcube, speed_cube, direction_cube,
        orographic_enhancement_cube, metadata_dict, args.max_lead_time,
        args.lead_time_interval, args.accumulation_fidelity,
        args.accumulation_period, args.accumulation_units)

    # Save Cube
    if args.output_filepaths and \
            len(args.output_filepaths) != len(forecast_to_return):
        raise ValueError("Require exactly one output file name for each "
                         "forecast lead time")
    for i, cube in enumerate(forecast_to_return):
        # save to a suitably-named output file
        if args.output_filepaths:
            file_name = args.output_filepaths[i]
        else:
            file_name = os.path.join(args.output_dir, generate_file_name(cube))
        save_netcdf(cube, file_name)

    if args.accumulation_fidelity > 0:
        # return accumulation cubes
        for i, cube in enumerate(accumulation_cubes):
            file_name = os.path.join(args.output_dir, generate_file_name(cube))
            save_netcdf(cube, file_name)
Example #15
    def setUp(self):
        """Create a cube containing a regular lat-lon grid.

        Data is formatted to increase linearly in x/y dimensions,
        e.g.
              0 1 2 3
              1 2 3 4
              2 3 4 5
              3 4 5 6
        """

        data = np.arange(0, 20, 1)
        for i in range(1, 20):
            data = np.append(data, np.arange(i, 20 + i))

        data.resize(1, 20, 20)
        latitudes = np.linspace(-90, 90, 20)
        longitudes = np.linspace(-180, 180, 20)
        latitude = DimCoord(latitudes,
                            standard_name='latitude',
                            units='degrees',
                            coord_system=GeogCS(6371229.0))
        longitude = DimCoord(longitudes,
                             standard_name='longitude',
                             units='degrees',
                             coord_system=GeogCS(6371229.0))

        # Use time of 2017-02-17 06:00:00
        time = DimCoord([1487311200],
                        standard_name='time',
                        units=cf_units.Unit(
                            'seconds since 1970-01-01 00:00:00',
                            calendar='gregorian'))

        time_dt = dt(2017, 2, 17, 6, 0)
        time_extract = Constraint(time=PartialDateTime(
            time_dt.year, time_dt.month, time_dt.day, time_dt.hour))
        forecast_ref_time = time[0].copy()
        forecast_ref_time.rename('forecast_reference_time')

        height = AuxCoord([1.5], standard_name='height', units='m')

        cube = Cube(data,
                    standard_name="air_temperature",
                    dim_coords_and_dims=[(time, 0), (latitude, 1),
                                         (longitude, 2)],
                    units="K")
        cube.add_aux_coord(forecast_ref_time)
        cube.add_aux_coord(height)
        cube.attributes['institution'] = 'Met Office'

        orography = Cube(np.ones((20, 20)),
                         long_name="surface_altitude",
                         dim_coords_and_dims=[(latitude, 0), (longitude, 1)],
                         units="m")

        # Western half of grid at altitude 0, eastern half at 10.
        # Note that the pressure_on_height_levels data is left unchanged,
        # so it is as if there is a sharp front running up the grid with
        # differing pressures on either side at equivalent heights above
        # the surface (e.g. east 1000hPa at 0m AMSL, west 1000hPa at 10m AMSL).
        # So there is higher pressure in the west.
        orography.data[0:10] = 0
        orography.data[10:] = 10
        ancillary_data = {}
        ancillary_data.update({'orography': orography})

        # Create additional vertical data used to calculate temperature lapse
        # rates from model levels.

        t_level0 = np.ones((1, 20, 20)) * 20.
        t_level1 = np.ones((1, 20, 20)) * 10.
        t_level2 = np.ones((1, 20, 20)) * 0.
        t_data = np.vstack((t_level0, t_level1, t_level2))
        t_data.resize((1, 3, 20, 20))

        p_level0 = np.ones((1, 20, 20)) * 1000.
        p_level1 = np.ones((1, 20, 20)) * 900.
        p_level2 = np.ones((1, 20, 20)) * 800.
        p_data = np.vstack((p_level0, p_level1, p_level2))
        p_data.resize((1, 3, 20, 20))

        height = DimCoord([0., 50., 100.], standard_name='height', units='m')

        temperature_on_height_levels = Cube(
            t_data,
            long_name="temperature_on_height_levels",
            dim_coords_and_dims=[(time, 0), (height, 1), (latitude, 2),
                                 (longitude, 3)],
            units="degree_Celsius")

        pressure_on_height_levels = Cube(p_data,
                                         long_name="pressure_on_height_levels",
                                         dim_coords_and_dims=[(time, 0),
                                                              (height, 1),
                                                              (latitude, 2),
                                                              (longitude, 3)],
                                         units="hPa")

        surface_pressure = Cube(p_data[0, 0].reshape(1, 20, 20),
                                long_name="surface_pressure",
                                dim_coords_and_dims=[(time, 0), (latitude, 1),
                                                     (longitude, 2)],
                                units="hPa")

        with iris.FUTURE.context(cell_datetime_objects=True):
            ad = {
                'temperature_on_height_levels':
                temperature_on_height_levels.extract(time_extract),
                'pressure_on_height_levels':
                pressure_on_height_levels.extract(time_extract),
                'surface_pressure':
                surface_pressure.extract(time_extract)
            }

        sites = OrderedDict()
        sites.update({
            '100': {
                'latitude': 4.74,
                'longitude': 9.47,
                'altitude': 10,
                'utc_offset': 0,
                'wmo_site': 0
            }
        })

        neighbour_list = np.empty(1,
                                  dtype=[('i', 'i8'), ('j', 'i8'),
                                         ('dz', 'f8'), ('edgepoint', 'bool_')])

        neighbour_list[0] = 10, 10, 0, False

        self.kwargs = {
            'upper_level': 2,
            'lower_level': 1,
            'dz_tolerance': 2.,
            'dthetadz_threshold': 0.02,
            'dz_max_adjustment': 70.
        }

        self.cube = cube
        self.ancillary_data = ancillary_data
        self.ad = ad
        self.sites = sites
        self.time_extract = time_extract
        self.neighbour_list = neighbour_list
        self.latitudes = latitudes
        self.latitude = latitude
        self.forecast_ref_time = forecast_ref_time
Example #16
def calculate_rams_LWup_TOA(filenames,**kwargs):
    from iris import Constraint
    LWUP=loadramscube(filenames,'LWUP',**kwargs)
    LWup_TOA=LWUP.extract(Constraint(model_level_number=LWUP.coord('model_level_number').points[-1]))
    LWup_TOA.rename('LWup_TOA')
    return LWup_TOA
Example #17
def parse_constraint_list(
    constraints: List[str], units: Optional[List[str]] = None
) -> Tuple[Constraint, Optional[Dict], Optional[float], Optional[Dict]]:
    """
    For simple constraints of a key=value format, these are passed in as a
    list of strings and converted to key-value pairs prior to creating the
    constraints.
    For more complex constraints, the list of strings given as input
    are evaluated by parsing for specific identifiers and then the constraints
    are created as required.
    The simple key-value pairs and other constraints are merged into a single
    constraint.

    Args:
        constraints:
            List of string constraints with keys and values split by "=":
            e.g: ["kw1=val1", "kw2 = val2", "kw3=val3"], where the vals
            could include ranges e.g. [0:20] or ranges with a step value e.g.
            [0:20:3].
        units:
            List of units (as strings) corresponding to each coordinate in the
            list of constraints.  One or more "units" may be None, and units
            may only be associated with coordinate constraints.

    Returns:
        - A combination of all the constraints that were supplied.
        - A dictionary of unit keys and values
        - A list containing the min and max values for a longitude constraint
        - A dictionary of coordinate and the step value, i.e. a step of 2 will
          skip every other point
    """

    if units is None:
        list_units = len(constraints) * [None]
        units_dict = None
    else:
        if len(units) != len(constraints):
            msg = "units list must match constraints"
            raise ValueError(msg)
        list_units = units
        units_dict = {}

    simple_constraints_dict = {}
    complex_constraints = []
    longitude_constraint = None
    thinning_values = {}
    for constraint_pair, unit_val in zip(constraints, list_units):
        key, value = constraint_pair.split("=", 1)
        key = key.strip(" ")
        value = value.strip(" ")

        if ":" in value:
            range_dict = parse_range_string_to_dict(value)

            # longitude is a circular coordinate, so needs to be treated in a
            # different way to a normal constraint
            if key == "longitude":
                longitude_constraint = [
                    FLOAT_DTYPE(range_dict[k]) for k in ["min", "max"]
                ]
            else:
                complex_constraints.append(
                    create_sorted_lambda_constraint(
                        key, [range_dict["min"], range_dict["max"]]
                    )
                )
            if range_dict.get("step", None):
                thinning_values[key] = int(range_dict["step"])
        else:
            try:
                typed_value = literal_eval(value)
            except ValueError:
                simple_constraints_dict[key] = value
            else:
                simple_constraints_dict[key] = create_constraint(typed_value)

        if unit_val is not None and unit_val.capitalize() != "None":
            units_dict[key] = unit_val.strip(" ")

    if simple_constraints_dict:
        simple_constraints = Constraint(**simple_constraints_dict)
    else:
        simple_constraints = None

    constraints = simple_constraints
    for constr in complex_constraints:
        constraints = constraints & constr

    return constraints, units_dict, longitude_constraint, thinning_values
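
A hedged sketch of the intended call pattern for this version, which also understands range values like [0:20] and [0:20:3]; the expected outputs below are illustrative, and the helpers it relies on (parse_range_string_to_dict, create_sorted_lambda_constraint, FLOAT_DTYPE, create_constraint) come from the surrounding module:

constraint_strs = ["percentile=50", "latitude=[-10:10:2]", "longitude=[0:40]"]
unit_strs = ["%", "degrees", "degrees"]

# constraints, units_dict, lon_bounds, thinning = parse_constraint_list(
#     constraint_strs, units=unit_strs)
#
# Expected, roughly:
#   units_dict -> {"percentile": "%", "latitude": "degrees", "longitude": "degrees"}
#   lon_bounds -> [0.0, 40.0]       # longitude handled separately (circular coord)
#   thinning   -> {"latitude": 2}   # keep every 2nd latitude point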
Ejemplo n.º 18
0
def calculate_rams_LWup_sfc(filenames, **kwargs):
    from iris import Constraint
    LWUP = loadramscube(filenames, 'LWUP', **kwargs)
    # Surface value is taken from the lowest model level (model_level_number == 0)
    LWup_sfc = LWUP.extract(Constraint(model_level_number=0))
    LWup_sfc.rename('LWup_sfc')
    return LWup_sfc
Ejemplo n.º 19
0
def plot(dirname, label_in):
    with catch_warnings():
        label = label_in

        # SPEEDY output is not CF compliant
        simplefilter('ignore', UserWarning)

        print(f'Plotting {fields[0]}')
        analy_ps = load_cube(f'{dirname}/mean.nc', fields[0])
        nature_ps = load_cube('nature.nc', fields[0])

        # Get minimum duration of data
        time = min(
            analy_ps.coord('time').points[-1],
            nature_ps.coord('time').points[-1])
        analy_ps = analy_ps.extract(Constraint(time=lambda t: t < time))
        nature_ps = nature_ps.extract(Constraint(time=lambda t: t < time))

        # Generate x date axis
        with FUTURE.context(cell_datetime_objects=True):
            time_axis = [x.point for x in nature_ps.coord('time').cells()]

        rmse = (((analy_ps - nature_ps) / obs_errors[0])**2).collapsed(
            ['latitude', 'longitude'], SUM)

        for field, obs_error in zip(fields[1:], obs_errors[1:]):
            print(f'Plotting {field}')
            for lev in levels:
                # Build iris constraint object
                lev_con = Constraint(atmosphere_sigma_coordinate=lev)

                analy_ps = load_cube(f'{dirname}/mean.nc', field)
                nature_ps = load_cube('nature.nc', field)

                analy_ps = analy_ps.extract(
                    Constraint(time=lambda t: t < time) & lev_con)
                nature_ps = nature_ps.extract(
                    Constraint(time=lambda t: t < time) & lev_con)

                rmse += (((analy_ps - nature_ps) / obs_error)**2).collapsed(
                    ['latitude', 'longitude'], SUM)

        # Divide by the total number of fields (4 3D fields x 8 levels + 1 2D field) and gridpoints (96*48)
        rmse = rmse / (33.0 * 96.0 * 48.0)

        # Square root to get RMSE
        rmse = rmse**0.5

        label = label_in + ' RMSE'

        # Try to compute time mean after March 1st 00:00 (if data doesn't go up to
        # March 1st yet, an AttributeError will be thrown - this is ignored)
        try:
            with FUTURE.context(cell_datetime_objects=True):
                after_march = rmse.extract(
                    Constraint(
                        time=lambda t: t > PartialDateTime(month=3, day=1)))
                mean = float(after_march.collapsed('time', MEAN).data)
            label += f' ({mean:{4}.{3}})'
        except AttributeError:
            pass

        rmse_h, = plt.plot(time_axis, rmse.data, label=label)

        for field, obs_error in zip(fields, obs_errors):
            print(f'Plotting {field}')
            analy_cb = load_cube(f'{dirname}/sprd.nc', field)
            analy_cb = analy_cb.extract(Constraint(time=lambda t: t < time))
            if field == 'Surface Pressure [Pa]':
                try:
                    sprd += (analy_cb / obs_error).collapsed(
                        ['latitude', 'longitude'], SUM)
                except NameError:
                    sprd = (analy_cb / obs_error).collapsed(
                        ['latitude', 'longitude'], SUM)
            else:
                for lev in levels:
                    analy_cb_lev = analy_cb.extract(
                        Constraint(atmosphere_sigma_coordinate=lev))
                    try:
                        sprd += (analy_cb_lev / obs_error).collapsed(
                            ['latitude', 'longitude'], SUM)
                    except NameError:
                        sprd = (analy_cb_lev / obs_error).collapsed(
                            ['latitude', 'longitude'], SUM)

        # Divide by the total number of fields (4 3D fields x 8 levels + 1 2D field) and gridpoints (96*48)
        sprd = sprd / (33.0 * 96.0 * 48.0)

        label = label_in + ' spread'

        # Try to compute time mean after March 1st 00:00 (if data doesn't go up to
        # March 1st yet, an AttributeError will be thrown - this is ignored)
        try:
            with FUTURE.context(cell_datetime_objects=True):
                after_march = sprd.extract(
                    Constraint(
                        time=lambda t: t > PartialDateTime(month=3, day=1)))
                mean = float(after_march.collapsed('time', MEAN).data)
            label += f'  ({mean:{4}.{3}})'
        except AttributeError:
            pass

        sprd_h, = plt.plot(time_axis,
                           sprd.data,
                           linestyle='--',
                           label=label,
                           color=rmse_h.get_color())

        return [rmse_h, sprd_h]
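
The plot() function above relies on several module-level names that are not defined in the snippet (fields, obs_errors, levels, FUTURE, MEAN, SUM, PartialDateTime, plt and load_cube). A minimal preamble consistent with how they are used might look like the following; everything data-specific in it is a placeholder, not a value from the original experiment.

# Hypothetical preamble for plot().  The iris/matplotlib imports are real;
# the field names (beyond surface pressure), observation errors and sigma
# levels are illustrative placeholders.
from warnings import catch_warnings, simplefilter

import matplotlib.pyplot as plt
from iris import FUTURE, Constraint, load_cube
from iris.analysis import MEAN, SUM
from iris.time import PartialDateTime

fields = ['Surface Pressure [Pa]',          # the single 2D field (used above)
          'U-wind [m/s]', 'V-wind [m/s]',   # placeholder names for the
          'Temperature [K]',                # four 3D fields
          'Specific Humidity [kg/kg]']
obs_errors = [100.0, 1.0, 1.0, 1.0, 0.001]  # placeholder error per field
levels = [0.025, 0.095, 0.2, 0.34,          # placeholder list of the
          0.51, 0.685, 0.835, 0.95]         # eight sigma levels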
Ejemplo n.º 20
0
def calculate_rams_SWdn_TOA(filenames, **kwargs):
    from iris import Constraint
    SWDN = loadramscube(filenames, 'SWDN', **kwargs)
    # Top-of-atmosphere value is taken from the uppermost model level
    SWdn_TOA = SWDN.extract(
        Constraint(model_level_number=SWDN.coord('model_level_number').points[-1]))
    SWdn_TOA.rename('SWdn_TOA')
    return SWdn_TOA
Ejemplo n.º 21
0
def _var_name_constraint(var_name):
    """:mod:`iris.Constraint` using `var_name` of a :mod:`iris.cube.Cube`."""
    return Constraint(cube_func=lambda c: c.var_name == var_name)
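
A minimal usage sketch for the helper above; the file path and variable name are placeholders, and the pattern matches the loading examples later in this collection.

from iris import load_cube

cube = load_cube('/path/to/aerosol_file.nc', _var_name_constraint('od550aer'))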
Ejemplo n.º 22
0
def calculate_rams_SWdn_sfc(filenames, **kwargs):
    from iris import Constraint
    SWDN = loadramscube(filenames, 'SWDN', **kwargs)
    # Surface value is taken from the lowest model level (model_level_number == 0)
    SWdn_sfc = SWDN.extract(Constraint(model_level_number=0))
    SWdn_sfc.rename('SWdn_sfc')
    return SWdn_sfc
Ejemplo n.º 23
0
# <codecell>

times = hgtprs.coord(axis='T')

resolution = times.attributes['resolution']
print("The time resolution is {} days".format(resolution))

time_step = 24 * resolution
print("The time step between forecast hours is {} hours".format(time_step))

# <codecell>

# But this one makes it worth using iris.

# Set US Bounds for GFS 1 deg data.
lon = Constraint(longitude=lambda x: 220 <= x <= 310)
lat = Constraint(latitude=lambda y: 20 <= y <= 70)

z300 = Constraint(altitude=lambda z: z == 300)
z500 = Constraint(altitude=lambda z: z == 500)
z850 = Constraint(altitude=lambda z: z == 850)
z1000 = Constraint(altitude=lambda z: z == 1000)

hght850 = hgtprs.extract(lon & lat & z850)
# or use the cubes directly.
hght500 = cubes.extract_strict(make_constraint("hgtprs") & lon & lat & z500)
hght1000 = cubes.extract_strict(make_constraint("hgtprs") & lon & lat & z1000)

temp850 = cubes.extract_strict(make_constraint("tmpprs") & lon & lat & z850)

avor500 = cubes.extract_strict(make_constraint("absvprs") & lon & lat & z500)
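
The make_constraint helper called above is not defined in this notebook cell; a minimal sketch consistent with its usage (selecting a cube by its variable name, as _var_name_constraint does in Ejemplo n.º 21) might be:

def make_constraint(var_name):
    # Hypothetical helper: match cubes whose netCDF var_name equals the given name.
    return Constraint(cube_func=lambda c: c.var_name == var_name)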
Ejemplo n.º 24
0
def setup(
    self, file_path_dict: dict, time_dim_len: int, file_format: str
) -> None:
    self.file_path = file_path_dict[time_dim_len][file_format]
    self.time_constr = Constraint(time=lambda cell: cell.point.year < 3)
Ejemplo n.º 25
0
    plt.gca().set_ylim((-90, 90))

    plt.gca().set_yticks([-60, -30, 0, 30, 60], crs=ccrs.PlateCarree())
    plt.gca().set_xticks([-180, -90, 0, 90, 180], crs=ccrs.PlateCarree())

    plt.title(name)


if __name__ == "__main__":
    plt.close("all")
    var = 'od550aer'

    # get test file dictionary
    test_files = get()

    constraint = Constraint(cube_func=lambda c: c.var_name == var)

    #longitudes OK
    cci = load_cube(test_files['models']['aatsr_su_v4.3'],
                    constraint=constraint)

    osuite = load_cube(test_files['models']['ecmwf_osuite'],
                       constraint=constraint)

    # crop ranges crossing the prime meridian (lon 0) and the antimeridian (lon 180),
    # in the -180 <= lon <= 180 longitude definition
    crop_range1 = (-30, 30)
    crop_range2 = (150, 210)

    # First example: use cube intersection method
    cci_intersect1 = cci.intersection(longitude=crop_range1)
    cci_intersect2 = cci.intersection(longitude=crop_range2)
Ejemplo n.º 26
0
def get_coord_constraint(name, value):
    """
    Create an iris.Constraint for coordinate key/value pair
    """
    return Constraint(coord_values={name: value})
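
A one-line usage sketch, assuming `cube` is an iris Cube with a 'realization' coordinate; both the coordinate name and value are illustrative.

member = cube.extract(get_coord_constraint('realization', 0))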
Ejemplo n.º 27
0
def subset_data(
    cube: Cube,
    grid_spec: Optional[Dict[str, Dict[str, int]]] = None,
    site_list: Optional[List] = None,
) -> Cube:
    """Extract a spatial cutout or subset of sites from data
    to generate suite reference outputs.

    Args:
        cube:
            Input dataset
        grid_spec:
            Dictionary containing bounding grid points and an integer "thinning
            factor" for each of UK and global grid, to create cutouts.  Eg a
            "thinning factor" of 10 would mean every 10th point being taken for
            the cutout.  The expected dictionary has keys that are spatial coordinate
            names, with values that are dictionaries with "min", "max" and "thin" keys.
        site_list:
            List of WMO site IDs to extract.  These IDs must match the type and format
            of the "wmo_id" coordinate on the input spot cube.

    Returns:
        Subset of input cube as specified by input constraints

    Raises:
        ValueError:
            If site_list is not provided for a spot data cube
        ValueError:
            If the spot data cube does not contain any of the required sites
        ValueError:
            If grid_spec is not provided for a gridded cube
        ValueError:
            If grid_spec does not contain entries for the spatial coordinates on
            the input gridded data
        ValueError:
            If the grid_spec provided does not overlap with the cube domain
    """
    if cube.coords("spot_index"):
        if site_list is None:
            raise ValueError("site_list required to extract from spot data")

        constraint = Constraint(
            coord_values={"wmo_id": lambda x: x in site_list})
        result = cube.extract(constraint)
        if result is None:
            raise ValueError(
                f"Cube does not contain any of the required sites: {site_list}"
            )

    else:
        if grid_spec is None:
            raise ValueError("grid_spec required to extract from gridded data")

        x_coord = cube.coord(axis="x").name()
        y_coord = cube.coord(axis="y").name()

        for coord in [y_coord, x_coord]:
            if coord not in grid_spec:
                raise ValueError(
                    f"Cube coordinates {y_coord}, {x_coord} are not present within "
                    f"{grid_spec.keys()}")

        def _create_cutout(cube, grid_spec):
            """Given a gridded data cube and boundary limits for cutout dimensions,
            create cutout.  Expects cube on either lat-lon or equal area grid.
            """
            x_coord = cube.coord(axis="x").name()
            y_coord = cube.coord(axis="y").name()

            xmin = grid_spec[x_coord]["min"]
            xmax = grid_spec[x_coord]["max"]
            ymin = grid_spec[y_coord]["min"]
            ymax = grid_spec[y_coord]["max"]

            # need to use cube intersection for circular coordinates (longitude)
            if x_coord == "longitude":
                lat_constraint = Constraint(
                    latitude=lambda y: ymin <= y.point <= ymax)
                cutout = cube.extract(lat_constraint)
                if cutout is None:
                    return cutout

                cutout = cutout.intersection(longitude=(xmin, xmax),
                                             ignore_bounds=True)

                # intersection creates a new coordinate with default datatype - we
                # therefore need to re-cast to meet the IMPROVER standard
                cutout.coord("longitude").points = cutout.coord(
                    "longitude").points.astype(FLOAT_DTYPE)
                if cutout.coord("longitude").bounds is not None:
                    cutout.coord("longitude").bounds = cutout.coord(
                        "longitude").bounds.astype(FLOAT_DTYPE)

            else:
                x_constraint = Constraint(
                    projection_x_coordinate=lambda x: xmin <= x.point <= xmax)
                y_constraint = Constraint(
                    projection_y_coordinate=lambda y: ymin <= y.point <= ymax)
                cutout = cube.extract(x_constraint & y_constraint)

            return cutout

        cutout = _create_cutout(cube, grid_spec)

        if cutout is None:
            raise ValueError(
                "Cube domain does not overlap with cutout specified:\n"
                f"{x_coord}: {grid_spec[x_coord]}, {y_coord}: {grid_spec[y_coord]}"
            )

        original_coords = get_dim_coord_names(cutout)
        thin_x = grid_spec[x_coord]["thin"]
        thin_y = grid_spec[y_coord]["thin"]
        result_list = CubeList()
        try:
            for subcube in cutout.slices([y_coord, x_coord]):
                result_list.append(subcube[::thin_y, ::thin_x])
        except ValueError as cause:
            # error is raised if X or Y coordinate are single-valued (non-dimensional)
            if "iterator" in str(cause) and "dimension" in str(cause):
                raise ValueError(
                    "Function does not support single point extraction")
            else:
                raise

        result = result_list.merge_cube()
        enforce_coordinate_ordering(result, original_coords)

    return result
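
A brief usage sketch for subset_data. The grid_spec structure follows the docstring above (spatial coordinate names mapping to "min", "max" and "thin"); the numeric bounds, thinning factors and WMO IDs are illustrative placeholders, and gridded_cube / spot_cube stand for iris Cubes loaded elsewhere.

# Gridded input: crop to the given lat-lon box and keep every 5th point.
grid_spec = {
    "longitude": {"min": -20, "max": 10, "thin": 5},
    "latitude": {"min": 48, "max": 62, "thin": 5},
}
cutout = subset_data(gridded_cube, grid_spec=grid_spec)

# Spot input: keep only the listed WMO sites.
spot_subset = subset_data(spot_cube, site_list=["03772", "03917"])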
Ejemplo n.º 28
0
import warnings
warnings.filterwarnings('ignore')
from iris import load_cube, Constraint, load

from GLOB import TEST_FILE

if __name__ == "__main__":

    cubes = load(TEST_FILE)
    for cube in cubes:
        print(cube.var_name)

    var = "od550aer"
    var_names = ["od550aer", "od550dust"]

    constraint = Constraint(cube_func=lambda c: c.var_name == var)

    constraint_multivar = Constraint(
        cube_func=lambda c: c.var_name in var_names)

    c = load_cube(TEST_FILE, constraint=constraint)

    multi_cube = load(TEST_FILE, constraint_multivar)

    cc = (constraint
          & Constraint(latitude=lambda x: 20 < x < 30)
          & Constraint(longitude=lambda x: 10 < x < 60))

    cubes = load(TEST_FILE)

    cube = cubes.extract(cc)[0]
Ejemplo n.º 29
0
def extract_cell_cubes_subset_2D(cubelist_in,
                                 mask,
                                 track,
                                 cell,
                                 z_coord='model_level_number'):
    import iris
    from iris import Constraint
    from iris.cube import CubeList
    import numpy as np
    from tobac import mask_cell_surface, get_bounding_box
    from copy import deepcopy

    track_i = track[track['cell'] == cell]

    cubelist_cell_sum = CubeList()

    for time_i in track_i['time'].values:

        logging.debug('start extracting cubes for cell ' + str(cell) +
                      ' and time ' + str(time_i))

        constraint_time = Constraint(time=time_i)
        mask_i = mask.extract(constraint_time)
        mask_cell_surface_i = mask_cell_surface(mask_i,
                                                cell,
                                                track_i,
                                                masked=False,
                                                z_coord=z_coord)

        x_dim = mask_cell_surface_i.coord_dims('projection_x_coordinate')[0]
        y_dim = mask_cell_surface_i.coord_dims('projection_y_coordinate')[0]
        x_coord = mask_cell_surface_i.coord('projection_x_coordinate')
        y_coord = mask_cell_surface_i.coord('projection_y_coordinate')

        if (mask_cell_surface_i.core_data() > 0).any():
            box_mask_i = get_bounding_box(mask_cell_surface_i.core_data(),
                                          buffer=1)

            box_mask = [[
                x_coord.points[box_mask_i[x_dim][0]],
                x_coord.points[box_mask_i[x_dim][1]]
            ],
                        [
                            y_coord.points[box_mask_i[y_dim][0]],
                            y_coord.points[box_mask_i[y_dim][1]]
                        ]]
        else:
            box_mask = [[np.nan, np.nan], [np.nan, np.nan]]

        width = 20
        dx = 500
        x = track_i[track_i['time'].values ==
                    time_i]['projection_x_coordinate'].values[0]
        y = track_i[track_i['time'].values ==
                    time_i]['projection_y_coordinate'].values[0]

        n_add_width = 2

        box_slice = [[
            x - (width + n_add_width) * dx, x + (width + n_add_width) * dx
        ], [y - (width + n_add_width) * dx, y + (width + n_add_width) * dx]]

        x_min = np.nanmin([box_mask[0][0], box_slice[0][0]])
        x_max = np.nanmax([box_mask[0][1], box_slice[0][1]])
        y_min = np.nanmin([box_mask[1][0], box_slice[1][0]])
        y_max = np.nanmax([box_mask[1][1], box_slice[1][1]])

        # use distinct lambda argument names to avoid shadowing the outer
        # `cell`, `x` and `y` variables
        constraint_x = Constraint(
            projection_x_coordinate=lambda xp: int(x_min) < xp < int(x_max))
        constraint_y = Constraint(
            projection_y_coordinate=lambda yp: int(y_min) < yp < int(y_max))

        constraint = constraint_time & constraint_x & constraint_y

        mask_cell_surface_i = mask_cell_surface_i.extract(constraint_x
                                                          & constraint_y)
        cubelist_i = cubelist_in.extract(constraint)

        cubelist_cell_sum.extend(
            sum_mask_surface(cubelist_i, mask_cell_surface_i))
    logging.debug(str(cubelist_cell_sum))
    cubelist_cell_sum_out = cubelist_cell_sum.merge()
    logging.debug(str(cubelist_cell_sum_out))
    for cube in cubelist_cell_sum_out:
        logging.debug(str(cube))
        logging.debug(str(cube.attributes))
        logging.debug(str(cube.coords()))

    if len(cubelist_cell_sum_out) == 6:
        logging.debug(
            str(
                iris.util.describe_diff(cubelist_cell_sum_out[0],
                                        cubelist_cell_sum_out[3])))
        logging.debug(
            str(
                iris.util.describe_diff(cubelist_cell_sum_out[1],
                                        cubelist_cell_sum_out[4])))
        logging.debug(
            str(
                iris.util.describe_diff(cubelist_cell_sum_out[2],
                                        cubelist_cell_sum_out[5])))

    track_cell = deepcopy(track_i)
    for cube in cubelist_cell_sum_out:
        logging.debug(f'cube.shape: {cube.shape}')
        logging.debug(f'len(track_cell): {len(track_cell)}')
        logging.debug(f'cube.coord("time"): {cube.coord("time")}')
        logging.debug(f'track_cell[time]: {track_cell["time"]}')

        track_cell[cube.name()] = cube.core_data()

    return cubelist_cell_sum_out, track_cell
Ejemplo n.º 30
0
# Change to relevant experiment directory
chdir(f'../experiments/{experiment}')

# Get background ensemble mean
background = load_cube(f'{prec}/gues_mean.nc', field)

# Get observations
observations = load_cube('obs.nc', field)

# Compute innovations
innovations = background - observations

# Extract level if not computing surface pressure innovations
if field != 'Surface Pressure [Pa]':
    innovations = innovations.extract(
        Constraint(atmosphere_sigma_coordinate=sigma))

# Compute time statistics after March 1st 00:00
innovations = innovations.extract(
    Constraint(time=lambda t: t > PartialDateTime(month=3, day=1)))

# Compute mean and standard deviation of innovations
innov_mean = innovations.collapsed('time', MEAN)
innov_std = innovations.collapsed('time', STD_DEV)

# Compute mask for statistical significance (> 1.96σ from 0.0)
significant = abs(
    innov_mean /
    (innov_std / sqrt(innovations.coord('time').points.shape[0]))).data > 1.96
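
# The criterion above amounts to a two-sided z-test on the time-mean innovation:
# |mean| / (std / sqrt(N)) > 1.96 marks points whose mean innovation differs from
# zero at roughly the 95% confidence level, with N the number of time samples.
# Worked example with illustrative numbers: N = 100 and std = 2.0 give a standard
# error of 2.0 / 10 = 0.2, so a mean innovation of 0.5 yields 0.5 / 0.2 = 2.5,
# which exceeds 1.96 and is therefore flagged as significant.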

# Plot innovation mean and standard deviation on a map