Example #1
from xarray import DataArray, Dataset
import xarray.ufuncs as xu  # deprecated in newer xarray; plain numpy ufuncs also work on DataArrays

# NOTE: area_grid() is assumed to be a helper defined elsewhere in the same
# module that returns the surface area of every lon/lat grid cell.


def global_avg(data, weights=None, dims=['lon', 'lat']):
    """ Compute (area-weighted) global average over a DataArray
    or Dataset. If `weights` are not passed, they will be computed
    by using the areas of each grid cell in the dataset.

    .. note::
        Handles missing values (nans and infs).

    """

    if isinstance(data, DataArray):

        if weights is None:  # Default to grid-cell area weights
            weights = area_grid(data.lon, data.lat)
            # Saving for later - compute latitudinal weighting
            # gw = weights.sum('lon')
            # weights = 2.*gw/gw.sum('lat')

        weights = weights.where(xu.isfinite(data))
        total_weights = weights.sum(dims)

        return (data * weights).sum(dims) / total_weights

    elif isinstance(data, Dataset):

        # Create a new temporary Dataset
        new_data = Dataset()

        # Iterate over the contents of the original Dataset,
        # which are all DataArrays, and compute the global avg
        # on those elements.
        for v in data.data_vars:
            coords = data[v].coords
            if 'lon' not in coords:
                new_data[v] = data[v]
            else:
                new_data[v] = global_avg(data[v], weights)

        # Collapse any remaining lat/lon dimensions
        leftover_dims = [d for d in dims if d in new_data.coords]
        if leftover_dims:
            new_data = new_data.sum(leftover_dims)
        return new_data
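A minimal usage sketch for the snippet above (not part of the original source): it builds a synthetic temperature field and passes precomputed cosine-of-latitude weights, so the module's `area_grid` helper is not needed. The variable names and the 2-degree grid are invented for illustration.

import numpy as np
import xarray as xr

# Synthetic 2-degree global temperature field (hypothetical data)
lat = np.arange(-89., 90., 2.)
lon = np.arange(0., 360., 2.)
temp = xr.DataArray(
    280. + 10. * np.random.rand(lat.size, lon.size),
    coords={'lat': lat, 'lon': lon},
    dims=('lat', 'lon'),
    name='temperature',
)

# Approximate area weights: proportional to cos(latitude), broadcast over lon
weights = np.cos(np.deg2rad(temp.lat)) * xr.ones_like(temp.lon, dtype=float)

# Area-weighted global mean; any NaNs in `temp` are excluded because
# global_avg masks the weights with isfinite(data) before summing
t_mean = global_avg(temp, weights=weights)
print(float(t_mean))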
Example #2
    def __call__(self, datasets, optional_datasets=None, **info):
        if len(datasets) != 3:
            raise ValueError("Expected 3 datasets, got %d" % (len(datasets), ))
        if not all(x.shape == datasets[0].shape for x in datasets[1:]) or \
                (optional_datasets and
                 optional_datasets[0].shape != datasets[0].shape):
            raise IncompatibleAreas('RatioSharpening requires datasets of '
                                    'the same size. Must resample first.')

        new_attrs = {}
        if optional_datasets:
            datasets = self.check_areas(datasets + optional_datasets)
            high_res = datasets[-1]
            p1, p2, p3 = datasets[:3]
            if 'rows_per_scan' in high_res.attrs:
                new_attrs.setdefault('rows_per_scan',
                                     high_res.attrs['rows_per_scan'])
            new_attrs.setdefault('resolution', high_res.attrs['resolution'])
            if self.high_resolution_band == "red":
                LOG.debug("Sharpening image with high resolution red band")
                ratio = high_res / p1
                # Make the ratio a no-op (multiply by 1) wherever it is NaN,
                # infinite, or negative: keep it only where finite and >= 0.
                ratio = ratio.where(xu.isfinite(ratio) & (ratio >= 0), 1.)
                r = high_res
                g = p2 * ratio
                b = p3 * ratio
                g.attrs = p2.attrs.copy()
                b.attrs = p3.attrs.copy()
            elif self.high_resolution_band == "green":
                LOG.debug("Sharpening image with high resolution green band")
                ratio = high_res / p2
                ratio = ratio.where(xu.isfinite(ratio) & (ratio >= 0), 1.)
                r = p1 * ratio
                g = high_res
                b = p3 * ratio
                r.attrs = p1.attrs.copy()
                b.attrs = p3.attrs.copy()
            elif self.high_resolution_band == "blue":
                LOG.debug("Sharpening image with high resolution blue band")
                ratio = high_res / p3
                ratio = ratio.where(xu.isfinite(ratio) & (ratio >= 0), 1.)
                r = p1 * ratio
                g = p2 * ratio
                b = high_res
                r.attrs = p1.attrs.copy()
                g.attrs = p2.attrs.copy()
            else:
                # no sharpening
                r = p1
                g = p2
                b = p3
        else:
            datasets = self.check_areas(datasets)
            r, g, b = datasets[:3]
        # combine the masks
        mask = ~(da.isnull(r.data) | da.isnull(g.data) | da.isnull(b.data))
        r = r.where(mask)
        g = g.where(mask)
        b = b.where(mask)

        # Collect metadata that is shared between the projectables. Use the
        # original datasets' metadata, since the new r, g, b arrays may have
        # lost theirs during the calculations above.
        info = combine_metadata(*datasets)
        info.update(new_attrs)
        # Update that information with configured information (including name)
        info.update(self.attrs)
        # Force certain pieces of metadata that we *know* to be true
        info.setdefault("standard_name", "true_color")
        return super(RatioSharpenedRGB, self).__call__((r, g, b), **info)
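The core of the compositor above is the guarded ratio. The following standalone sketch (not Satpy code; the toy arrays `p1`, `p2`, and `high_res` are invented here, and plain `np.isfinite` stands in for `xu.isfinite`) shows how the where() call turns NaN and negative ratios into a multiply-by-1 no-op:

import numpy as np
import xarray as xr

# Toy low-resolution bands and a co-located high-resolution band
# (hypothetical values; in Satpy these would be resampled projectables)
p1 = xr.DataArray(np.array([[-0.1, 0.2], [0.4, np.nan]]), dims=('y', 'x'))
p2 = xr.DataArray(np.array([[0.05, 0.15], [0.25, 0.35]]), dims=('y', 'x'))
high_res = xr.DataArray(np.array([[0.1, 0.3], [0.2, 0.5]]), dims=('y', 'x'))

ratio = high_res / p1  # NaN and negative entries appear here

# Keep the ratio only where it is finite and non-negative; elsewhere fall
# back to 1 so the sharpening is a no-op for those pixels
safe_ratio = ratio.where(np.isfinite(ratio) & (ratio >= 0), 1.0)

g = p2 * safe_ratio  # sharpened green band, as in the "red" branch above
print(safe_ratio.values)  # 1.0 wherever the raw ratio was negative or NaN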