Example #1
    def _update_metadata(diff_cube: Cube, coord_name: str,
                         cube_name: str) -> None:
        """Rename cube, add attribute and cell method to describe difference.

        Args:
            diff_cube:
                Cube of differences whose metadata is updated in place.
            coord_name:
                Name of the coordinate over which the difference has been
                calculated.
            cube_name:
                Name of the cube from which the difference was derived; used
                to construct the new cube name.
        """
        # Add metadata to indicate that a difference has been calculated.
        # TODO: update metadata for difference when
        #  proper conventions have been agreed upon.
        cell_method = CellMethod("difference",
                                 coords=[coord_name],
                                 intervals="1 grid length")
        diff_cube.add_cell_method(cell_method)
        diff_cube.attributes["form_of_difference"] = "forward_difference"
        diff_cube.rename("difference_of_" + cube_name)
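A minimal usage sketch, treating the method as a standalone function (it takes no self, so it is presumably a staticmethod on its plugin class); the cube and coordinate name below are illustrative:

    import numpy as np
    from iris.cube import Cube

    cube = Cube(np.zeros((3, 3), dtype=np.float32), units="m s-1")
    cube.rename("wind_speed")

    _update_metadata(cube, "projection_x_coordinate", cube.name())
    print(cube.name())                            # difference_of_wind_speed
    print(cube.attributes["form_of_difference"])  # forward_difference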
Example #2
    def process(standard_landmask: Cube) -> Cube:
        """Read in the interpolated landmask and round values < 0.5 to False
             and values >=0.5 to True.

        Args:
            standard_landmask:
                input landmask on standard grid.

        Returns:
            output landmask of boolean values.
        """
        # Zero the sea points (< 0.5) first; any remaining non-zero value is
        # then at least 0.5 and is rounded up to land.
        mask_sea = standard_landmask.data < 0.5
        standard_landmask.data[mask_sea] = False
        mask_land = standard_landmask.data > 0.0
        standard_landmask.data[mask_land] = True
        standard_landmask.data = standard_landmask.data.astype(np.int8)
        standard_landmask.rename("land_binary_mask")
        return standard_landmask
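A short usage sketch, again treating the method as a standalone function; the 2x2 fractional landmask is illustrative:

    import numpy as np
    from iris.cube import Cube

    landmask = Cube(np.array([[0.2, 0.6], [0.9, 0.4]], dtype=np.float32))
    landmask.rename("interpolated_landmask")

    binary_mask = process(landmask)
    print(binary_mask.name())  # land_binary_mask
    print(binary_mask.data)    # [[0 1]
                               #  [1 0]]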
Example #3
    def _update_metadata(self, cube: Cube) -> None:
        """Rename the cube and add attributes to the threshold coordinate
        after merging
        """
        threshold_coord = cube.coord(self.threshold_coord_name)
        threshold_coord.attributes.update(
            {"spp__relative_to_threshold": self.comparison_operator.spp_string}
        )
        if cube.cell_methods:
            format_cell_methods_for_probability(cube, self.threshold_coord_name)

        cube.rename(
            "probability_of_{parameter}_{relative_to}_threshold".format(
                parameter=self.threshold_coord_name,
                relative_to=probability_is_above_or_below(cube),
            )
        )
        cube.units = Unit(1)
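The resulting cube name follows a fixed pattern. A sketch of the format string with concrete example values standing in for the threshold coordinate name and the probability_is_above_or_below helper result:

    name = "probability_of_{parameter}_{relative_to}_threshold".format(
        parameter="air_temperature", relative_to="above"
    )
    print(name)  # probability_of_air_temperature_above_threshold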
Example #4
    def _update_metadata(self, output_cube: Cube,
                         original_units: Unit) -> None:
        """
        Update output cube name and threshold coordinate

        Args:
            output_cube:
                Cube containing new "between_thresholds" probabilities
            original_units:
                Required threshold-type coordinate units
        """
        new_name = self.cube.name().replace(
            "{}_threshold".format(probability_is_above_or_below(self.cube)),
            "between_thresholds",
        )
        output_cube.rename(new_name)

        new_thresh_coord = output_cube.coord(self.thresh_coord.name())
        new_thresh_coord.convert_units(original_units)
        new_thresh_coord.attributes["spp__relative_to_threshold"] = (
            "between_thresholds"
        )
Example #5
from datetime import datetime

import numpy as np
from iris.coords import CellMethod, DimCoord
from iris.cube import Cube


def climatology_3d():
    def jan_offset(day, year):
        dt = datetime(year, 1, day) - datetime(1970, 1, 1)
        return dt.total_seconds() / (24.0 * 3600)

    days = range(10, 15)
    # Use one [start_year, end_year] pair per day so that zip() below does
    # not silently drop days.
    years = [[year, year + 10] for year in [2001] * len(days)]
    days_since = [
        [jan_offset(day, yr1), jan_offset(day, yr2)]
        for (day, [yr1, yr2]) in zip(days, years)
    ]
    time_bounds = np.array(days_since)
    time_points = time_bounds[..., 0]

    lon = np.linspace(-25, 25, 5)
    lat = np.linspace(0, 60, 3)

    time_dim = DimCoord(
        time_points,
        standard_name="time",
        bounds=time_bounds,
        units="days since 1970-01-01 00:00:00-00",
        climatological=True,
    )
    lon_dim = DimCoord(lon, standard_name="longitude", units="degrees")
    lat_dim = DimCoord(lat, standard_name="latitude", units="degrees")

    data_shape = (len(time_points), len(lat), len(lon))
    values = np.zeros(shape=data_shape, dtype=np.int8)
    cube = Cube(values)
    cube.add_dim_coord(time_dim, 0)
    cube.add_dim_coord(lat_dim, 1)
    cube.add_dim_coord(lon_dim, 2)
    cube.rename("climatology test")
    cube.units = "Kelvin"
    cube.add_cell_method(CellMethod("mean over years", coords="time"))

    return cube
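The fixture runs standalone once the imports above are in place; a quick check of its climatological time coordinate:

    cube = climatology_3d()
    print(cube.summary(shorten=True))
    # climatology test / (Kelvin)      (time: 5; latitude: 3; longitude: 5)
    print(cube.coord("time").climatological)  # True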
Example #6
    def make_cube(self, cube, data, sites):
        """
        Construct and return a cube containing the data extracted from the
        grids by the desired method for the sites provided.

        Args:
        -----
        cube : iris.cube.Cube
            The original diagnostic cube from which data has been extracted.

        data : numpy.array
            Array of diagnostic values extracted for the defined sites.

        sites : OrderedDict
            A dictionary containing the properties of spotdata sites.

        Returns:
        --------
        cube : iris.cube.Cube
            An irregular (i.e. non-gridded) cube of diagnostic data extracted
            at the spotdata sites.

        """
        # Ensure time is a dimension coordinate.
        dim_coord_names = [coord.name() for coord in cube.dim_coords]
        if 'time' not in dim_coord_names:
            cube = iris.util.new_axis(cube, 'time')

        n_non_spatial_dimcoords = len(cube.dim_coords) - 2
        non_spatial_dimcoords = cube.dim_coords[0:n_non_spatial_dimcoords]
        cube.coord('time').convert_units('hours since 1970-01-01 00:00:00')

        latitudes = [float(site['latitude']) for site in sites.values()]
        longitudes = [float(site['longitude']) for site in sites.values()]
        altitudes = [
            np.nan_to_num(site['altitude']) for site in sites.values()
        ]
        utc_offsets = [
            float(site['utc_offset']) for site in sites.values()
        ]
        wmo_sites = [site['wmo_site'] for site in sites.values()]

        indices, latitude, longitude, altitude, utc_offset, wmo_site = (
            self._build_coordinates(latitudes, longitudes, altitudes,
                                    utc_offsets, wmo_sites))

        dim_coords = list(non_spatial_dimcoords)
        dim_coords.append(indices)
        n_dim_coords = len(dim_coords)
        dim_coords = list(zip(dim_coords, range(n_dim_coords)))
        aux_coords = list(zip(
            [latitude, longitude, altitude, utc_offset, wmo_site],
            [n_dim_coords - 1] * 5))

        # Add leading dimension for time.
        data = np.expand_dims(data, axis=0)
        result_cube = Cube(data,
                           long_name=cube.name(),
                           dim_coords_and_dims=dim_coords,
                           aux_coords_and_dims=aux_coords,
                           units=cube.units)

        # Enables use of long_name above for any name, and then moves it
        # to a standard name if possible.
        result_cube.rename(cube.name())
        return result_cube
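The (coordinate, dimension) pairing passed to the Cube constructor above can be shown in isolation. A minimal, self-contained sketch using the same iris API (all names here are illustrative):

    import numpy as np
    from iris.coords import AuxCoord, DimCoord
    from iris.cube import Cube

    index = DimCoord(np.arange(3), long_name="index")
    latitude = AuxCoord([50.1, 51.2, 52.3], standard_name="latitude",
                        units="degrees")

    site_cube = Cube(np.zeros(3),
                     long_name="example_diagnostic",
                     dim_coords_and_dims=[(index, 0)],
                     aux_coords_and_dims=[(latitude, 0)])
    print(site_cube.summary(shorten=True))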
Example #7
    def make_cube(self, cube, data, sites):
        """
        Construct and return a cube containing the data extracted from the
        grids by the desired method for the sites provided.

        Args:
        -----
        cube : iris.cube.Cube
            The original diagnostic cube from which data has been extracted.

        data : numpy.array
            Array of diagnostic values extracted for the defined sites.

        sites : OrderedDict
            A dictionary containing the properties of spotdata sites.

        Returns:
        --------
        cube : iris.cube.Cube
            An irregular (i.e. non-gridded) cube of diagnostic data extracted
            at the spotdata sites.

        """

        # Ensure time is a dimension coordinate and convert to seconds.
        cube_coords = [coord.name() for coord in cube.dim_coords]
        if 'time' not in cube_coords:
            cube = iris.util.new_axis(cube, 'time')
        cube.coord('time').convert_units('seconds since 1970-01-01 00:00:00')

        cube_coords = [coord.name() for coord in cube.coords()]
        if 'forecast_reference_time' not in cube_coords:
            raise CoordinateNotFoundError(
                'No forecast reference time found on source cube.')
        cube.coord('forecast_reference_time').convert_units(
            'seconds since 1970-01-01 00:00:00')

        # Replicate all non-spatial dimension coordinates.
        n_non_spatial_dimcoords = len(cube.dim_coords) - 2
        non_spatial_dimcoords = cube.dim_coords[0:n_non_spatial_dimcoords]
        dim_coords = list(non_spatial_dimcoords)

        # Add an index coordinate as a dimension coordinate.
        indices = self._build_coordinate(np.arange(len(sites)),
                                         'index',
                                         data_type=int)
        dim_coords.append(indices)

        # Record existing scalar coordinates on the source cube. Aux coords
        # associated with dimensions cannot be preserved, as the dimensions
        # will be reshaped and the auxiliaries will no longer be compatible.
        # Forecast period is ignored for the case where the input data has
        # an existing forecast_period scalar coordinate.
        scalar_coordinates = [
            coord.name() for coord in cube.coords(dimensions=[])
            if coord.name() != 'forecast_period'
        ]

        # Build a forecast_period dimension.
        forecast_periods = (cube.coord('time').points -
                            cube.coord('forecast_reference_time').points)
        forecast_period = self._build_coordinate(forecast_periods,
                                                 'forecast_period',
                                                 units='seconds')

        # Build the new auxiliary coordinates.
        crds = self._aux_coords_to_make()
        aux_crds = []
        for key, kwargs in crds.items():
            aux_data = np.array([entry[key] for entry in sites.values()])
            crd = self._build_coordinate(aux_data, key, **kwargs)
            aux_crds.append(crd)

        # Construct zipped lists of coordinates and indices. New aux coords are
        # associated with the index dimension.
        n_dim_coords = len(dim_coords)
        dim_coords = list(zip(dim_coords, range(n_dim_coords)))
        aux_coords = list(zip(aux_crds, [n_dim_coords - 1] * len(aux_crds)))

        # Copy other cube metadata.
        metadata_dict = copy.deepcopy(cube.metadata._asdict())

        # Add leading dimension for time to the data array.
        data = np.expand_dims(data, axis=0)
        result_cube = Cube(data,
                           dim_coords_and_dims=dim_coords,
                           aux_coords_and_dims=aux_coords,
                           **metadata_dict)

        # Add back scalar coordinates from the original cube.
        for coord in scalar_coordinates:
            result_cube.add_aux_coord(cube.coord(coord))

        result_cube.add_aux_coord(forecast_period, cube.coord_dims('time'))

        # Enables use of long_name above for any name, and then moves it
        # to a standard name if possible.
        result_cube.rename(cube.name())

        # Promote any statistical coordinates to be first.
        result_cube = self.make_stat_coordinate_first(result_cube)
        return result_cube
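The closing rename relies on a useful iris behaviour: Cube.rename sets standard_name when the name is a valid CF standard name and falls back to long_name otherwise. A minimal sketch:

    import numpy as np
    from iris.cube import Cube

    cube = Cube(np.zeros(2), long_name="my_spot_diagnostic")
    cube.rename("air_temperature")       # valid CF standard name
    print(cube.standard_name)            # air_temperature
    cube.rename("my_spot_diagnostic")    # not a standard name
    print(cube.standard_name, cube.long_name)  # None my_spot_diagnostic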