def test_fcls_unmixing_with_single_endmember_spectra_multicore(self):
    '''
    Should calculate abundance from a mixed image for single endmember
    spectra; result should be the same for 1 or 2 processes.
    '''
    em_locs = [(326701, 4696895), (324978, 4699651), (328823, 4696835)]
    arr, gt, wkt = as_array(
        os.path.join(
            self.test_dir, 'LT05_020030_merge_19950712_stack_clip.tiff'))
    # FIX: the MNF rotation and the endmember extraction were each
    # performed twice (the first results were discarded); compute each
    # exactly once and reuse.
    arr_mnf = mnf_rotation(arr).T
    endmembers = spectra_at_xy(arr_mnf, em_locs, gt, wkt)
    # Same 10x10 spatial subset, unmixed with 1 vs. 2 worker processes
    fcls_mapper1 = FCLSAbundanceMapper(
        arr_mnf[:, 100:110, 100:110], gt, wkt, processes=1)
    fcls_mapper2 = FCLSAbundanceMapper(
        arr_mnf[:, 100:110, 100:110], gt, wkt, processes=2)
    result1 = fcls_mapper1.map_abundance(endmembers)
    result2 = fcls_mapper2.map_abundance(endmembers)
    # Parallelism must not change the answer: bitwise-identical results
    self.assertTrue(np.all(np.equal(result1, result2)))
    # And the result must match the known-good SHA-256 digest
    hasher = hashlib.sha256()
    hasher.update(result2)
    self.assertEqual(
        hasher.hexdigest(), self.test_data_100_110_hex_string)
def test_endmember_search_by_maximum_volume(self):
    '''Should be able to search for endmembers by maximum volume.'''
    rast, gt, wkt = as_array(
        os.path.join(self.test_dir, 'multi7_raster.tiff'))
    # Transpose the MNF-rotated cube once up front
    hsi = mnf_rotation(rast).T
    # The search must accept both a dictionary of cases and a plain
    # sequence/array of targets
    for targets in (self.cases, self.cases['Vegetation']):
        result = endmembers_by_maximum_volume(
            hsi, targets, gt=gt, wkt=wkt, dd=False)
        self.assertIsInstance(result[0], np.ndarray)
    # ...and an explicit reference target coordinate
    ref_target = (300953, 4682679)
    result = endmembers_by_maximum_volume(
        hsi, self.cases['Vegetation'], ref_target, gt=gt, wkt=wkt,
        dd=False)
    self.assertIsInstance(result[0], np.ndarray)
def test_endmember_search_by_maximum_area(self):
    '''Should be able to search for endmembers by maximum area.'''
    rast, gt, wkt = as_array(
        os.path.join(self.test_dir, 'multi7_raster.tiff'))
    hsi_post_mnf = mnf_rotation(rast)
    # Test that dictionary type works
    result = endmembers_by_maximum_area(
        hsi_post_mnf.T, self.cases, gt=gt, wkt=wkt, dd=False)
    self.assertTrue(isinstance(result[0], np.ndarray))
    # FIX: these checks were written as assertTrue(list(...), [expected]),
    # i.e. assertTrue(non_empty_list, msg=...), which can never fail.
    # The two expected lists differ by exactly 1 in every element, so
    # they bracket the same spectrum between its floor and its ceiling;
    # assert both bounds with assertEqual.
    self.assertEqual(
        list(map(np.floor, result[0][0].tolist())), [15.0, -9.0, 1.0])
    self.assertEqual(
        list(map(np.ceil, result[0][0].tolist())), [16.0, -8.0, 2.0])
    # Test that sequence/ array type works
    result = endmembers_by_maximum_area(
        hsi_post_mnf.T, self.cases['Vegetation'], gt=gt, wkt=wkt,
        dd=False)
    self.assertTrue(isinstance(result[0], np.ndarray))
    self.assertEqual(
        list(map(np.floor, result[0][0].tolist())), [-12.0, 0.0, 0.0])
    self.assertEqual(
        list(map(np.ceil, result[0][0].tolist())), [-11.0, 1.0, 1.0])
def __init__(self, path=None, mask=None, cut_dim=None, ravel=True,
        transform=True, nodata=None, feature_limit=90000,
        selected_feature_limit=30, epsg=None, keyword=None, verbose=False):
    '''
    Loads a raster, optionally applies the MNF transformation and a
    binary mask, and builds a shuffled sample of pixel features.

    Arguments:
        path        File path to the raster to load (None to skip I/O)
        mask        A file path, gdal.Dataset, or numpy.ndarray mask
        cut_dim     If set, keep only the first `cut_dim` bands
        ravel       True to flatten the random features to (pixels, bands)
        transform   True to apply the MNF transformation on read
        nodata      The NoData value to screen out of the random sample
        feature_limit           Max. number of stored random pixels
        selected_feature_limit  Max. number of selected features
        epsg        EPSG code for the spatial reference
        keyword     A "nickname" for this raster
        verbose     True for verbose output
    '''
    self.__nodata__ = nodata
    self.__raveled__ = ravel
    self.__limit__ = feature_limit
    self.__sel_limit__ = selected_feature_limit
    self.__verbose__ = verbose
    self.epsg = epsg
    self.size = (9, 9)
    self.dpi = 72
    if path is not None:
        # FIX: was `assert os.path.exists(path)`; asserts are stripped
        # under "python -O", so raise an explicit error instead
        if not os.path.exists(path):
            raise FileNotFoundError('No such file or directory: %s' % path)
        ds = gdal.Open(path)  # (p, lat, lng)
        self.keyword = keyword  # A "nickname" for this raster
        self.__wd__ = os.path.dirname(path)
        self.__gt__ = ds.GetGeoTransform()
        self.__wkt__ = ds.GetProjection()
        self.spatial_ref = {'gt': self.__gt__, 'wkt': self.__wkt__}
        if keyword is None:
            # Look for a date (7-8 numbers) set off by underscores
            date_match = re.compile(r'.*_(?P<date>\d{7,8})_.*').match(
                os.path.basename(path))
            if date_match is not None:
                self.keyword = date_match.groups()[0]
        # Apply the MNF transformation?
        if transform:
            self.features = mnf_rotation(ds.ReadAsArray())  # (lng, lat, p)
        else:
            self.features = ds.ReadAsArray().transpose()
        if cut_dim:
            # Get rid of extraneous dimensionality
            self.features = self.features[..., 0:cut_dim]
        ds = None  # Release the GDAL dataset handle
        # Apply a mask?
        if mask is not None:
            # FIX: was `type(mask) == str`; isinstance is the idiomatic
            # (and subclass-safe) type check
            if isinstance(mask, str):
                # A file path: read the mask raster from disk
                mask, gt, wkt = as_array(mask)
            else:
                if not isinstance(mask, np.ndarray):
                    # presumably a gdal.Dataset — TODO confirm
                    mask = mask.ReadAsArray()
            self.features = binary_mask(
                self.features.transpose(), mask, nodata=nodata).transpose()
            mask = None
        # Create random features: flatten to (pixels, bands) and shuffle
        self.rfeatures = self.features.copy().reshape(
            (self.features.shape[0] * self.features.shape[1],
                self.features.shape[2]))
        np.random.shuffle(self.rfeatures)
        # Limit the size of the stored array; keep first 90,000 (300*300)
        if ravel and nodata is not None:
            # Remove all "rows" (pixels) where there is a NoData value
            # in any "column" (band)
            self.rfeatures = self.rfeatures[
                (self.rfeatures != nodata).any(axis=1), :]
            if self.__limit__ is not None:
                self.rfeatures = self.rfeatures[0:self.__limit__, :]
        else:
            self.rfeatures = self.rfeatures.reshape(self.features.shape)
            # If a limit was specified, select the first N random pixels
            if self.__limit__ is not None:
                r = int(np.sqrt(self.__limit__))
                self.rfeatures = self.rfeatures.reshape(
                    self.features.shape)[0:r, 0:r, :]
        # FIX: removed a redundant second `ds = None` that followed here