Code example #1
# NOTE: Requires os, numpy (as np), and the helper functions spectra_at_xy,
#   array_to_raster, and dump_raster to be imported in the enclosing module
def hall_rectification(reference, subject, out_path, ref_set, sub_set, dd=False, nodata=-9999,
    dtype=np.int32, keys=('High/Bright', 'Low/Dark')):
    '''
    Performs radiometric rectification after Hall et al. (1991) in Remote
    Sensing of Environment. Assumes first raster is the reference image and
    that none of the targets are NoData pixels in the reference image (they
    are filtered out in the subject images). Arguments:
        reference   The reference image, a gdal.Dataset
        subject     The subject image, a gdal.Dataset
        out_path    Path to a directory where the rectified images should be stored
        ref_set     Radiometric control sets for the reference image: a dict
                    (or other mapping) keyed by `keys`, giving the "bright"
                    control targets and then the "dark" control targets, each
                    as a sequence of (x, y) coordinates
        sub_set     As with ref_set, but for the subject image, e.g.,
                    {'High/Bright': [<bright targets>], 'Low/Dark': [<dark targets>]}
        dd          Coordinates are in decimal degrees?
        nodata      The NoData value to use for all the rasters
        dtype       Data type (NumPy dtype) for the array; default is 32-bit Int
        keys        The names of the dictionary keys for the bright, dark sets,
                    respectively
    '''
    # Unpack bright, dark control sets for subject image
    bright_targets, dark_targets = (sub_set[keys[0]], sub_set[keys[1]])

    # Calculate the mean reflectance in each band for bright, dark targets
    bright_ref = spectra_at_xy(reference, ref_set[keys[0]], dd=dd).mean(axis=0)
    dark_ref = spectra_at_xy(reference, ref_set[keys[1]], dd=dd).mean(axis=0)

    # Calculate transformation for the target image
    brights = spectra_at_xy(subject, bright_targets, dd=dd) # Prepare to filter NoData pixels
    darks = spectra_at_xy(subject, dark_targets, dd=dd)
    # Get the "subject" image means for each radiometric control set
    mean_bright = brights[
        np.sum(brights, axis=1) != (nodata * brights.shape[1])
    ].mean(axis=0)
    mean_dark = darks[
        np.sum(darks, axis=1) != (nodata * darks.shape[1])
    ].mean(axis=0)

    # Calculate the coefficients of the linear transformation
    m = (bright_ref - dark_ref) / (mean_bright - mean_dark)
    b = (dark_ref * mean_bright - mean_dark * bright_ref) / (mean_bright - mean_dark)

    arr = subject.ReadAsArray()
    shp = arr.shape # Remember the original shape
    mask = arr.copy() # Save the NoData value locations
    m = m.reshape((shp[0], 1))
    b = b.reshape((shp[0], 1)).T.repeat(shp[1] * shp[2], axis=0).T
    arr2 = ((arr.reshape((shp[0], shp[1] * shp[2])) * m) + b).reshape(shp)
    arr2[mask == nodata] = nodata # Re-apply NoData values

    # Dump the raster to a file
    out_path = os.path.join(out_path, 'rect_%s' % os.path.basename(subject.GetDescription()))
    dump_raster(
        array_to_raster(arr2, subject.GetGeoTransform(), subject.GetProjection(), dtype=dtype), out_path)
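
The core of the Hall et al. (1991) rectification is the per-band linear transform computed from the bright and dark control-set means. The following is a minimal NumPy sketch of that transform with made-up two-band means (the values are illustrative only and do not come from any real scene):

import numpy as np

# Hypothetical per-band means for a 2-band image (illustrative values only)
bright_ref = np.array([1800., 2400.])   # reference image, bright control set
dark_ref = np.array([300., 150.])       # reference image, dark control set
mean_bright = np.array([2100., 2700.])  # subject image, bright control set
mean_dark = np.array([450., 300.])      # subject image, dark control set

# Per-band gain and offset, as in hall_rectification()
m = (bright_ref - dark_ref) / (mean_bright - mean_dark)
b = (dark_ref * mean_bright - mean_dark * bright_ref) / (mean_bright - mean_dark)

# The transform maps the subject control-set means onto the reference means
assert np.allclose(m * mean_bright + b, bright_ref)
assert np.allclose(m * mean_dark + b, dark_ref)

# The same transform applied to a (bands x pixels) subject array
subject_pixels = np.array([[450., 1200., 2100.], [300., 1500., 2700.]])
rectified = m[:, None] * subject_pixels + b[:, None]
print(rectified)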
Code example #2
File: visualize.py  Project: isall/unmixing
    def __spectra__(self, points, dd, scale, domain, nodata=None):
        '''
        Accesses spectral profiles from the stored features for the given
        point coordinates. If `nodata` argument is provided, these are filtered
        from the spectra.
        '''
        assert not self.__raveled__, 'Cannot do this when the input array is raveled'
        spectra = spectra_at_xy(
            self.features.transpose(), points, dd=dd,
            **self.spatial_ref) * scale

        if nodata is not None:
            # Build a (1 x bands) row of NoData values to filter against
            nodata_array = np.full((1, spectra.shape[1]), nodata)
            spectra = spectra[np.all(spectra != nodata_array, 1), :]

        return spectra
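
The NoData filtering step can be reproduced on its own: rows of the (points x bands) spectra matrix are kept only if none of their bands equal the NoData value. A small, self-contained sketch (the -9999 fill value and the toy spectra are assumptions for illustration):

import numpy as np

nodata = -9999
# Hypothetical (points x bands) spectra matrix; the second row is a NoData pixel
spectra = np.array([
    [0.12, 0.18, 0.25, 0.31],
    [nodata, nodata, nodata, nodata],
    [0.08, 0.11, 0.19, 0.27],
])

# Keep only rows in which every band differs from the NoData value,
#   mirroring the filter in __spectra__()
nodata_array = np.full((1, spectra.shape[1]), nodata)
filtered = spectra[np.all(spectra != nodata_array, 1), :]
print(filtered)   # Two rows remain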
Code example #3
File: lsma.py  Project: winggy/unmixing
def iterate_endmember_combinations(rast,
                                   targets,
                                   ref_target=None,
                                   ndim=3,
                                   gt=None,
                                   wkt=None,
                                   dd=False):
    '''
    Creates all possible combinations of endmembers from a common pool or from
    among groups of possible endmembers (when `targets` is a dictionary). When
    a dictionary is provided, endmember combinations will contain (only) one
    endmember from each group, where the groups are defined by dictionary keys,
    i.e., `{'group1': [(x, y), ...], 'group2': [(x, y), ...], ...}`.
    Arguments:
        rast        Input raster array
        targets     Possible endmembers; as a list or dictionary
        ref_target  (Optional) Constrain the optimization to always include
                    this endmember
        ndim        Number of dimensions to limit the search to
        gt          (Optional) A GDAL GeoTransform, required for array input
        wkt         (Optional) A GDAL WKT projection, required for array input
        dd          True if the target coordinates are in decimal degrees
    '''
    # Can accept either a gdal.Dataset or numpy.array instance
    if not isinstance(rast, np.ndarray):
        rastr = rast.ReadAsArray()
        gt = rast.GetGeoTransform()
        wkt = rast.GetProjection()

    else:
        assert gt is not None and wkt is not None, 'gt and wkt arguments required'
        rastr = rast.copy()

    # `targets` is a dictionary
    if isinstance(targets, dict):
        # Get the spectra for these targets; this works in two dimensions only
        target_specs = {}
        for label, cases in targets.items():
            target_specs[label] = spectra_at_xy(
                rast, targets[label], gt, wkt, dd=dd)[..., 0:ndim]

        ncom = ndim  # Determinant only defined for square matrices
        if ref_target is not None:
            assert ndim == len(
                targets.keys()
            ) + 1, 'Number of groups among target endmembers should be one less than the dimensionality when ref_target is used'

            ncom -= 1  # If reference target used, form combinations with 1 fewer
            ref_spec = spectra_at_xy(rast, (ref_target, ), gt, wkt,
                                     dd=dd)[..., 0:ndim].reshape((ndim, ))

        # Find all possible combinations of (ncom) of these spectra
        spec_map = list(
            itertools.product(
                *[target_specs[label] for label in target_specs.keys()]))
        coord_map = list(itertools.product(*[t[1] for t in targets.items()]))

    else:
        # Get the spectra for these targets; this works in two dimensions only
        target_specs = spectra_at_xy(rast, targets, gt, wkt, dd=dd)[..., 0:ndim]
        ncom = ndim  # Determinant only defined for square matrices
        if ref_target is not None:
            ncom -= 1  # If reference target used, form combinations with 1 fewer
            ref_spec = spectra_at_xy(rast, (ref_target, ), gt, wkt,
                                     dd=dd)[..., 0:ndim].reshape((ndim, ))

        # Find all possible combinations of (ncom) of these spectra
        combos = list(
            itertools.combinations(range(max(target_specs.shape)), ncom))
        spec_map = [[target_specs[i, :] for i in triad] for triad in combos]
        coord_map = [[targets[i] for i in triad] for triad in combos]

    # Add the reference target to each combination
    if ref_target is not None:
        spec_map = list(map(list, spec_map))
        for spec in spec_map:
            # FIXME Cannot use insert with tuples when dictionary input is provided
            spec.insert(0, ref_spec)

    return (spec_map, coord_map)
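
The two branches differ only in how candidate spectra are combined: itertools.product draws one member from each named group (the dictionary case), while itertools.combinations draws unordered n-tuples from a single pool (the list case). A toy sketch with fabricated 3-band spectra, with no raster or spectra_at_xy() call involved:

import itertools
import numpy as np

ndim = 3

# Grouped candidates: one endmember is drawn from each group (dictionary case)
target_specs = {
    'vegetation': np.array([[0.1, 0.5, 0.3], [0.2, 0.6, 0.2]]),
    'soil':       np.array([[0.4, 0.3, 0.2]]),
    'water':      np.array([[0.05, 0.02, 0.01], [0.06, 0.03, 0.02]]),
}
spec_map_grouped = list(itertools.product(
    *[target_specs[label] for label in target_specs.keys()]))
print(len(spec_map_grouped))   # 2 * 1 * 2 = 4 combinations

# Common pool: all unordered combinations of ndim candidates (list case)
pool = np.array([[0.1, 0.5, 0.3], [0.4, 0.3, 0.2],
                 [0.05, 0.02, 0.01], [0.2, 0.6, 0.2]])
combos = list(itertools.combinations(range(max(pool.shape)), ndim))
spec_map_pool = [[pool[i, :] for i in triad] for triad in combos]
print(len(spec_map_pool))      # C(4, 3) = 4 combinations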
Code example #4
File: lsma.py  Project: winggy/unmixing
def endmembers_by_maximum_angle(rast,
                                targets,
                                ref_target,
                                gt=None,
                                wkt=None,
                                dd=False):
    '''
    Locates endmembers in (2-dimensional) feature space as the triad (3-corner
    simplex) that maximizes the angle formed with a reference endmember target.
    Returns the endmember coordinates in feature (not geographic) space.
    Arguments:
        rast        The raster that describes the feature space
        targets     The map (x, y) coordinates of all candidate endmembers
        ref_target  The map (x, y) coordinates of the endmember that is held fixed
        gt          The GDAL GeoTransform
        wkt         The GDAL WKT projection
        dd          True for coordinates in decimal degrees

    Angle calculation from:
    http://stackoverflow.com/questions/2827393/angles-between-two-n-dimensional-vectors-in-python/13849249#13849249
    '''
    def unit_vector(vector):
        # Returns the unit vector of the vector.
        return vector / np.linalg.norm(vector)

    def angle_between(v1, v2):
        # Returns the angle in radians between vectors 'v1' and 'v2'
        v1_u = unit_vector(v1)
        v2_u = unit_vector(v2)
        return np.arccos(np.clip(np.dot(v1_u, v2_u), -1.0, 1.0))

    # Can accept either a gdal.Dataset or numpy.array instance
    if not isinstance(rast, np.ndarray):
        rastr = rast.ReadAsArray()
        gt = rast.GetGeoTransform()
        wkt = rast.GetProjection()

    else:
        assert gt is not None and wkt is not None, 'gt and wkt arguments required'
        rastr = rast.copy()

    # Get the spectra for these targets; this works in two dimensions only
    ref_spec = spectra_at_xy(rast, (ref_target, ), gt, wkt,
                             dd=dd)[..., 0:2].reshape((2, ))
    target_specs = spectra_at_xy(rast, targets, gt, wkt, dd=dd)[..., 0:2]

    # All combinations of 2 of the targets
    combos = list(itertools.combinations(range(max(target_specs.shape)), 2))
    spec_map = [[target_specs[i, :] for i in triad] for triad in combos]
    coord_map = [[targets[i] for i in triad] for triad in combos]

    # Find vectors relative to ref_spec, not to the origin (by vector subtraction)
    #   If (cx) is the ref_spec vector (line from origin to ref_spec),
    #   and (ca) and (cb) are the vectors to the points that form the angle
    #   (axb), then [(cx) - (ca)] and [(cx) - (cb)] point from a and b toward
    #   x; the angle between them is the angle (axb) subtended at point x.
    vectors = [(ref_spec - a, ref_spec - b) for a, b in spec_map]
    angles = [angle_between(v1, v2) for v1, v2 in vectors]
    idx = angles.index(max(angles))
    specs = spec_map[idx]  # The optimized spectra
    locs = coord_map[idx]  # The optimized coordinates
    specs.insert(0, ref_spec)  # Add the reference target
    locs.insert(0, ref_target)  # Add the reference coordinates
    return (np.array(specs), locs)
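
The angle criterion is easy to verify in isolation: for each candidate pair (a, b), vectors from a and b to the reference point x are formed by subtraction, and the pair that subtends the largest angle at x is kept. A small sketch with made-up 2-D feature-space points (the coordinates are purely illustrative):

import itertools
import numpy as np

def angle_between(v1, v2):
    # Angle in radians between two vectors (same formula as in the example)
    v1_u = v1 / np.linalg.norm(v1)
    v2_u = v2 / np.linalg.norm(v2)
    return np.arccos(np.clip(np.dot(v1_u, v2_u), -1.0, 1.0))

# Hypothetical 2-D feature-space coordinates
ref_spec = np.array([0.0, 0.0])                 # The fixed reference endmember
candidates = np.array([[1.0, 0.1], [0.9, 0.9], [0.1, 1.0]])

pairs = list(itertools.combinations(range(len(candidates)), 2))
vectors = [(ref_spec - candidates[i], ref_spec - candidates[j]) for i, j in pairs]
angles = [angle_between(v1, v2) for v1, v2 in vectors]

best = pairs[angles.index(max(angles))]
print(best)   # (0, 2): the pair that forms the widest angle as seen from ref_spec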
Code example #5
File: lsma.py  Project: winggy/unmixing
    def validate_by_forward_model(self,
                                  ref_image,
                                  abundances,
                                  ref_spectra=None,
                                  ref_em_locations=None,
                                  dd=False,
                                  nodata=-9999,
                                  r=10000,
                                  as_pct=True):
        '''
        Validates LSMA result in the forward model of reflectance, i.e.,
        compares the observed reflectance in the original (mixed) image to the
        abundance predicted by a forward model of reflectance using the
        provided endmember spectra. NOTE: Does not apply in the case of
        multiple endmember spectra; requires only one spectral profile per
        endmember type.
        Arguments:
            ref_image   A raster array of the reference spectra (not MNF-
                        transformed data).
            abundances  A raster array of abundances; a (q x m x n) array for
                        q abundance types (q endmembers).
            ref_spectra With single endmember spectra, user can provide the
                        reference spectra, e.g., the observed reflectance for
                        each endmember (not MNF spectra).
            ref_em_locations With single endmember spectra, user can provide
                        the coordinates of each endmember, so that reference
                        spectra can be extracted for validation.
            dd          True if ref_em_locations provided and the coordinates
                        are in decimal degrees.
            nodata      The NoData value to use.
            r           The number of random samples to take in calculating
                        RMSE.
            as_pct      Report normalized RMSE (as a percentage).
        '''
        rastr = ref_image.copy()
        assert (ref_spectra is not None) or (
            ref_em_locations is not None
        ), 'When single endmember spectra are used, either ref_spectra or ref_em_locations must be provided'

        if ref_spectra is not None:
            assert ref_spectra.shape[0] == abundances.shape[0], \
                'One reference spectrum must be provided for each endmember type in the abundance map'

        else:
            # Get the spectra for each endmember from the reference dataset
            ref_spectra = spectra_at_xy(ref_image,
                                        ref_em_locations,
                                        self.gt,
                                        self.wkt,
                                        dd=dd)

        # Convert the NoData values to zero reflectance; reshape the array
        rastr[rastr == nodata] = 0
        ref_spectra[ref_spectra == nodata] = 0
        shp = rastr.shape
        arr = rastr.reshape((shp[0], shp[1] * shp[2]))

        # Generate `r` random sampling indices (r = 10,000 by default)
        idx = np.random.choice(np.arange(0, arr.shape[1]), r)

        # Get the predicted reflectances from the forward model
        preds = predict_spectra_from_abundance(ravel(abundances), ref_spectra)
        assert preds.shape == arr.shape, 'Prediction and observation matrices are not the same size'

        # Take the mean RMSE (sum of RMSE divided by number of pixels), after
        #   the residuals are normalized by the number of endmembers
        rmse_value = rmse(arr, preds, idx, n=ref_spectra.shape[0]).sum() / r

        norm = 1
        if as_pct:
            # Divide by the range of the measured data; minimum is zero
            norm = arr.max()
            return str(round(rmse_value / norm * 100, 2)) + '%'

        return round(rmse_value / norm, 2)
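
The validation statistic is a sampled, normalized RMSE between observed and forward-modeled reflectance. Below is a toy sketch of that bookkeeping with fabricated observed and predicted matrices; the library's rmse() helper (which also normalizes by the number of endmembers) is replaced here by a plain per-pixel RMSE so that the sketch is self-contained:

import numpy as np

r = 5  # Number of random pixel samples (10,000 in the example above)
np.random.seed(0)

# Hypothetical observed and forward-modeled reflectance, (bands x pixels)
observed = np.random.rand(4, 20)
predicted = observed + np.random.normal(0, 0.02, observed.shape)

# Sample r pixel columns at random, as in validate_by_forward_model()
idx = np.random.choice(np.arange(0, observed.shape[1]), r)

# Per-pixel RMSE over bands at the sampled pixels, then the mean over samples
per_pixel_rmse = np.sqrt(((observed[:, idx] - predicted[:, idx]) ** 2).mean(axis=0))
rmse_value = per_pixel_rmse.sum() / r

# Normalized RMSE, reported as a percentage of the observed data range
print('%.2f%%' % (rmse_value / observed.max() * 100))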
Code example #6
def validate_abundance_by_forward_model(
        reference, points, *abundances, r=10000, as_pct=True, dd=True,
        nodata=-9999):
    '''
    Validates abundance through a forward model that predicts the
    reflectance at each pixel from the reflectance of each of
    the endmembers and their fractional abundances. Arguments:
        reference   The reference image (e.g., TM/ETM+ reflectance), an np.ndarray or gdal.Dataset
        points      The XY points at which the endmembers are located
        abundances  One or more abundance maps, as file paths
        r           The number of random samples to take from the forward model
        as_pct      Report RMSE as a percentage?
        dd          XY points are in decimal degrees?
        nodata      The NoData value
    '''
    # Can accept either a gdal.Dataset or numpy.array instance for reading
    #   the array; note, however, that the GeoTransform and projection are
    #   read from `reference` below, so a gdal.Dataset is required for the
    #   coordinate lookup
    if not isinstance(reference, np.ndarray):
        rastr = reference.ReadAsArray()

    else:
        rastr = reference.copy()

    # Get the spectra for each endmember from the reference dataset
    spectra = spectra_at_xy(rastr, points, gt=reference.GetGeoTransform(),
        wkt=reference.GetProjection(), dd=dd)

    # Convert the NoData values to zero reflectance; reshape the array
    rastr[rastr == nodata] = 0
    spectra[spectra == nodata] = 0
    shp = rastr.shape
    rast = rastr.reshape((shp[0], shp[1] * shp[2]))

    # Generate `r` random sampling indices (r = 10,000 by default)
    idx = np.random.choice(np.arange(0, rast.shape[1]), r)

    # Predict the reflectances!
    stats = []
    for path in abundances:
        abundance_map, gt, wkt = as_array(path)

        # Get the predicted reflectances
        preds = predict_spectra_from_abundance(ravel(abundance_map), spectra)
        assert preds.shape == rast.shape, 'Prediction and observation matrices are not the same size'

        # Take the mean RMSE (sum of RMSE divided by number of pixels), after
        #   the residuals are normalized by the number of endmembers
        rmse_value = rmse(rast, preds, idx, n=spectra.shape[0]).sum() / r

        norm = 1
        if as_pct:
            # Divide by the range of the measured data; minimum is zero
            norm = rast.max()

        stats.append(rmse_value / norm)

    for i, p in enumerate(abundances):
        if as_pct:
            print('%s%% -- [%s]' % (str(round(stats[i] * 100, 2)).rjust(15),
                os.path.basename(p)))

        else:
            print('%s -- [%s]' % (str(round(stats[i], 2)).rjust(15),
                os.path.basename(p)))

    return stats
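
The forward model behind both validation functions is plain linear mixing: a pixel's predicted reflectance is the abundance-weighted sum of the endmember spectra. A minimal sketch with made-up spectra and abundances, standing in for predict_spectra_from_abundance() (the endmember labels are assumptions for illustration):

import numpy as np

# Hypothetical endmember reflectance spectra, (q endmembers x p bands)
endmember_spectra = np.array([
    [0.45, 0.50, 0.55],   # e.g., impervious surface
    [0.05, 0.30, 0.20],   # e.g., vegetation
    [0.02, 0.03, 0.01],   # e.g., dark surface / shade
])

# Hypothetical fractional abundances, (q endmembers x n pixels); columns sum to 1
abundances = np.array([
    [0.6, 0.1],
    [0.3, 0.2],
    [0.1, 0.7],
])

# Linear mixing: predicted reflectance is a (p bands x n pixels) array
predicted = endmember_spectra.T @ abundances
print(predicted)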