def gen_abundance_maps(data, U, result_path):
    import pysptools.abundance_maps as amp
    print('Abundance maps generation with NNLS')
    nnls = amp.NNLS()
    amaps = nnls.map(data, U, normalize=True)
    nnls.plot(result_path, colorMap='jet', suffix='gas')
    # return an array of abundance maps
    return amaps
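For orientation, here is a minimal, hypothetical usage sketch (the synthetic data and array shapes are assumptions, not part of the snippet above): pysptools expects the cube as a (rows, columns, bands) array and the endmember matrix U as (n_endmembers, bands), and NNLS.map returns a (rows, columns, n_endmembers) abundance cube.

import numpy as np

# synthetic 50 x 60 cube with 8 bands and 3 made-up endmember spectra
data = np.random.rand(50, 60, 8)
U = np.random.rand(3, 8)

amaps = gen_abundance_maps(data, U, '.')  # plots are written to the given path
print(amaps.shape)                        # expected: (50, 60, 3)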
Example #2
def NNLS(data, U, umix_source, mask, path):
    import pysptools.abundance_maps as amp
    print('  Testing NNLS')
    nnls = amp.NNLS()
    amaps = nnls.map(data, U, normalize=True)
    nnls.plot(path, colorMap='jet', suffix=umix_source)
    nnls.plot(path, interpolation='spline36', suffix=umix_source + '_spline36')
    return amaps
Example #3
def test_NNLS(data, U, umix_source, mask, path):
    import pysptools.abundance_maps as amp
    print('  Testing NNLS')
    nnls = amp.NNLS()
    pr = profile()
    amap = nnls.map(data, U, normalize=True)
    stat(pr)
    nnls.plot(path, colorMap='jet', suffix=umix_source)
    nnls.plot(path, mask=mask, colorMap='jet', suffix=umix_source + '_mask')
    nnls.plot(path, interpolation='spline36', suffix=umix_source + '_spline36')
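profile() and stat() are not defined in this snippet; they appear to be local profiling helpers from the surrounding test script. A rough stand-in built on the standard library (the names and behaviour are assumptions) could look like this:

import cProfile
import io
import pstats

def profile():
    # start a profiler; stands in for the helper used in test_NNLS above
    pr = cProfile.Profile()
    pr.enable()
    return pr

def stat(pr):
    # stop the profiler and print the ten most expensive calls
    pr.disable()
    s = io.StringIO()
    pstats.Stats(pr, stream=s).sort_stats('cumulative').print_stats(10)
    print(s.getvalue())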
Example #4
def __init__(self, hcube, n_em, suffix):
    self.suffix = suffix
    self.nfindr = eea.NFINDR()
    self.U = self.nfindr.extract(hcube,
                                 n_em,
                                 maxit=5,
                                 normalize=False,
                                 ATGP_init=True)
    # self.xxls = amp.FCLS()
    self.xxls = amp.NNLS()
    self.amaps = self.xxls.map(hcube, self.U, normalize=False)
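This snippet assumes import pysptools.eea as eea and import pysptools.abundance_maps as amp. The commented-out line suggests amp.FCLS() as a drop-in alternative: NNLS only enforces non-negative abundances, while FCLS additionally constrains each pixel's abundances to sum to one (and needs cvxopt installed). A sketch of the swap:

import pysptools.abundance_maps as amp

# FCLS: non-negativity plus sum-to-one constraint; NNLS: non-negativity only
unmixer = amp.FCLS()                                # instead of amp.NNLS()
# amaps = unmixer.map(hcube, U, normalize=False)    # same call signature as NNLS.map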
Example #5
def main():

    try:
        import pysptools.eea as eea
    except ImportError:
        gs.fatal(_("Cannot import pysptools \
                      (https://pypi.python.org/pypi/pysptools) library."
                      " Please install it (pip install pysptools)"
                      " or ensure that it is on path"
                      " (use PYTHONPATH variable)."))

    try:
        # sklearn is a dependency of used pysptools functionality
        import sklearn
    except ImportError:
        gs.fatal(_("Cannot import sklearn \
                      (https://pypi.python.org/pypi/scikit-learn) library."
                      " Please install it (pip install scikit-learn)"
                      " or ensure that it is on path"
                      " (use PYTHONPATH variable)."))

    try:
        from cvxopt import solvers, matrix
    except ImportError:
        gs.fatal(_("Cannot import cvxopt \
                      (https://pypi.python.org/pypi/cvxopt) library."
                      " Please install it (pip install cvxopt)"
                      " or ensure that it is on path"
                      " (use PYTHONPATH variable)."))

    # Parse input options
    input = options['input']
    output = options['output']
    prefix = options['prefix']
    endmember_n = int(options['endmember_n'])
    endmembers = options['endmembers']
    if options['maxit']:
        maxit = int(options['maxit'])
    else:
        maxit = 0
    extraction_method = options['extraction_method']
    unmixing_method = options['unmixing_method']
    atgp_init = True if not flags['n'] else False

    # List maps in imagery group
    try:
        maps = gs.read_command('i.group', flags='g', group=input,
                               quiet=True).rstrip('\n').split('\n')
    except:
        gs.fatal('Could not read imagery group <{}>'.format(input))

    # Validate input
    # q and maxit can be None according to manual, but does not work in current pysptools version
    if endmember_n <= 0:
        gs.fatal('Number of endmembers has to be > 0')
        """if (extraction_method == 'PPI' or
            extraction_method == 'NFINDR'):
            gs.fatal('Extraction methods PPI and NFINDR require endmember_n >= 2')
        endmember_n = None"""

    if maxit <= 0:
        maxit = 3 * len(maps)

    if endmember_n > len(maps) + 1:
        gs.warning('More endmembers ({}) requested than bands in '
                   'input imagery group ({})'.format(endmember_n, len(maps)))
        if extraction_method != 'PPI':
            gs.fatal('Only the PPI method can extract more endmembers than the '
                     'number of bands in the imagery group')

    if not atgp_init and extraction_method != 'NFINDR':
        gs.verbose('ATGP is only taken into account in '
                   'the NFINDR extraction method...')

    # Get metainformation from input bands
    band_types = {}
    img = None
    n = 0
    gs.verbose('Reading imagery group...')
    for m in maps:
        map = m.split('@')

        # Build numpy stack from imagery group
        raster = r.raster2numpy(map[0], mapset=map[1])
        if raster.dtype == np.float64:
            raster = np.float32(raster)
            gs.warning('{} is of type Float64. '
                       'Float64 is currently not supported. '
                       'Reducing precision to Float32'.format(map[0]))

        # Determine map type
        band_types[map[0]] = get_rastertype(raster)

        # Create cube and mask from GRASS internal NoData value
        if n == 0:
            img = raster
            # Create mask from GRASS internal NoData value
            mask = mask_rasternd(raster)
        else:
            img = np.dstack((img, raster))
            mask = np.logical_and((mask_rasternd(raster)), mask)

        n = n + 1

    # Read the GRASS MASK raster if present and apply it
    # Note that otherwise NoData is read as values
    gs.verbose('Checking for MASK...')
    try:
        MASK = r.raster2numpy('MASK', mapset=getenv('MAPSET')) == 1
        mask = np.logical_and(MASK, mask)
        MASK = None
    except:
        pass

    if extraction_method == 'NFINDR':
        # Extract endmembers from valid pixels using the NFINDR function from pysptools
        gs.verbose('Extracting endmembers using NFINDR...')
        nfindr = eea.NFINDR()
        E = nfindr.extract(img, endmember_n, maxit=maxit, normalize=False,
                           ATGP_init=atgp_init, mask=mask)
    elif extraction_method == 'PPI':
        # Extract endmembers from valid pixels using the PPI function from pysptools
        gs.verbose('Extracting endmembers using PPI...')
        ppi = eea.PPI()
        E = ppi.extract(img, endmember_n, numSkewers=10000, normalize=False,
                        mask=mask)
    elif extraction_method == 'FIPPI':
        # Extract endmembers from valid pixels using the FIPPI function from pysptools
        gs.verbose('Extracting endmembers using FIPPI...')
        fippi = eea.FIPPI()
        # q and maxit can be None according to manual, but does not work
        """if not maxit and not endmember_n:
            E = fippi.extract(img, q=None, normalize=False, mask=mask)
        if not maxit:
            E = fippi.extract(img, q=endmember_n, normalize=False, mask=mask)
        if not endmember_n:
            E = fippi.extract(img, q=int(), maxit=maxit, normalize=False,
                              mask=mask)
        else:
            E = fippi.extract(img, q=endmember_n, maxit=maxit, normalize=False,
                              mask=mask)"""
        E = fippi.extract(img, q=endmember_n, maxit=maxit, normalize=False,
                          mask=mask)

    # Write output file in format required for i.spec.unmix addon
    if output:
        gs.verbose('Writing spectra file...')
        n = 0
        with open(output, 'w') as o:
            o.write('# Channels: {}\n'.format('\t'.join(band_types.keys())))
            o.write('# Wrote {} spectra line wise.\n#\n'.format(endmember_n))
            o.write('Matrix: {0} by {1}\n'.format(endmember_n, len(maps)))
            for e in E:
                o.write('row{0}: {1}\n'.format(n, '\t'.join([str(i) for i in e])))
                n = n + 1

    # Write vector map with endmember information if requested
    if endmembers:
        gs.verbose('Writing vector map with endmembers...')
        from grass.pygrass import utils as u
        from grass.pygrass.gis.region import Region
        from grass.pygrass.vector import Vector
        from grass.pygrass.vector import VectorTopo
        from grass.pygrass.vector.geometry import Point

        # Build attribute table
        # Define columns for attribute table
        cols = [(u'cat', 'INTEGER PRIMARY KEY')]
        for b in band_types.keys():
            cols.append((b.replace('.','_'), band_types[b]))
        
        # Get region information
        reg = Region()

        # Create vector map
        new = Vector(endmembers)
        new.open('w', tab_name=endmembers, tab_cols=cols)

        cat = 1
        for e in E:
            # Get indices
            idx = np.where((img[:,:]==e).all(-1))

            # Numpy array is ordered rows, columns (y,x)
            if len(idx[0]) == 0 or len(idx[1]) == 0:
                gs.warning('Could not compute coordinates for endmember {}. '
                           'Please consider rescaling your data to integer'.format(cat))
                cat = cat + 1
                continue

            coords = u.pixel2coor((idx[1][0], idx[0][0]), reg)
            point = Point(coords[1] + reg.ewres / 2.0,
                          coords[0] - reg.nsres / 2.0)

            # Get attributes
            n = 0
            attr = []
            for b in band_types.keys():
                if band_types[b] == u'INTEGER':
                    attr.append(int(e[n]))
                else:
                    attr.append(float(e[n]))
                n = n + 1

            # Write geometry with attributes
            new.write(point, cat=cat,
                      attrs=tuple(attr))
            cat = cat + 1

        # Close vector map
        new.table.conn.commit()
        new.close(build=True)

    if prefix:
        # Run spectral unmixing
        import pysptools.abundance_maps as amaps
        if unmixing_method == 'FCLS':
            fcls = amaps.FCLS()
            result = fcls.map(img, E, normalize=False, mask=mask)
        elif unmixing_method == 'NNLS':
            nnls = amaps.NNLS()
            result = nnls.map(img, E, normalize=False, mask=mask)
        elif unmixing_method == 'UCLS':
            ucls = amaps.UCLS()
            result = ucls.map(img, E, normalize=False, mask=mask)

        # Write results
        for l in range(endmember_n):
            rastname = '{0}_{1}'.format(prefix, l + 1)
            r.numpy2raster(result[:,:,l], 'FCELL', rastname)
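
A note on the mask argument used throughout main(): the pysptools extract() and map() methods accept a 2-D binary mask with the same rows and columns as the cube, and only pixels where the mask is truthy are processed. A minimal sketch of deriving such a mask from NaN pixels (the array and its shape are made up for illustration):

import numpy as np

# img: (rows, cols, bands) cube where NaN marks NoData
img = np.random.rand(100, 100, 6)
img[0:10, 0:10, :] = np.nan                # pretend NoData corner

# keep only pixels that are finite in every band
mask = np.all(np.isfinite(img), axis=2)

# mask can then be passed to eea.NFINDR().extract(img, q, mask=mask)
# and to the .map(..., mask=mask) calls shown above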
Example #6
    def getWaterFraction(cls,
                         ds,
                         cloudThresh=-20,
                         constrain=True,
                         maskClouds=True):

        if maskClouds:
            atmsNoClouds = cls.maskClouds(ds, threshold=cloudThresh)
        else:
            atmsNoClouds = ds.copy()

        # channel difference (C4 - C3)
        dBtr = (atmsNoClouds.sel(band='C4').astype(float)
                - atmsNoClouds.sel(band='C3').astype(float))
        dBtr.coords['band'] = 'dBtr'

        channels = xr.concat([
            atmsNoClouds.sel(band=['C3', 'C4', 'C16']).isel(time=0, z=0),
            dBtr.isel(time=0, z=0)
        ], dim='band')
        arr = channels.values
        arr[np.isnan(arr)] = -9999

        nClasses = 3
        nfindr = eea.NFINDR()
        U = nfindr.extract(arr,
                           nClasses,
                           maxit=100,
                           normalize=True,
                           ATGP_init=True)

        drop = np.argmin(list(map(lambda x: U[x, :].mean(), range(nClasses))))
        waterIdx = np.argmin(
            list(
                map(lambda x: np.delete(U, drop, axis=1)[x, :],
                    range(nClasses - 2))))

        if waterIdx == 0:
            bandList = ['water', 'land', 'mask']
        else:
            bandList = ['land', 'water', 'mask']

        nnls = amp.NNLS()
        amaps = nnls.map(arr, U, normalize=True)

        drop = np.argmin(
            list(map(lambda x: amaps[:, :, x].mean(), range(amaps.shape[2]))))

        unmixed = np.delete(amaps, drop, axis=2)

        unmixed[unmixed == 0] = np.nan

        scaled = np.zeros_like(unmixed)
        for i in range(scaled.shape[2]):
            summed = unmixed[:, :, i] / unmixed.sum(axis=2)
            scaled[:, :, i] = ((summed - np.nanmin(summed))
                               / (np.nanmax(summed) - np.nanmin(summed)))

        scaled = scaled - 0.25
        scaled[scaled < 0] = 0

        fWater = atmsNoClouds.sel(band=['C1', 'C2', 'mask']).copy()
        fWater[:, :, 0, :2, 0] = scaled[:, :, :]
        fWater.coords['band'] = bandList

        return fWater.raster.updateMask(atmsNoClouds.sel(band='mask'))
Example #7
# N-FINDR
# useful in situations where the basis spectra are already present in the dataset
# New modules: pysptools

from pysptools.eea import nfindr
import pysptools.abundance_maps as amp
import pyUSID as usid
import matplotlib.pyplot as plt

# h5_main is assumed to be a pyUSID USIDataset opened earlier in the workflow

num_endmembers = 3

# find the endmembers

comps = nfindr.NFINDR(h5_main[:].copy(), num_endmembers)[0]

# calculate abundance maps

nnls = amp.NNLS()
abundances = nnls.map(h5_main[:].copy().reshape(h5_main.pos_dim_sizes[0],
                                                h5_main.pos_dim_sizes[1], -1),
                      comps)

# plot the components

usid.plot_utils.plot_map_stack(comps.reshape(num_endmembers,
                                             h5_main.spec_dim_sizes[0],
                                             h5_main.spec_dim_sizes[1]),
                               title='', color_bar_mode='each')

plt.savefig("spectral-decomposition-10.jpg", dpi=300)
plt.show()

# plot the abundances

usid.plot_utils.plot_map_stack(abundances.transpose(2, 0, 1),