def save_ice_map(inp_filename, raw_filename, classifier_filename, threads, source, quicklook, force):
    """Load texture features, apply a trained classifier and save the ice map.

    Parameters
    ----------
    inp_filename : str
        Path to a '*_texture_features.npz' file with 'HH'/'HV' texture features.
    raw_filename : str
        Path to the original product; used only to recover geometry (GCPs).
    classifier_filename : str
        Path to the pickled, trained classifier.
    threads : int
        Number of parallel jobs given to the classifier (clf.n_jobs).
    source : str
        Tag inserted into the output filename and passed to add_colortable.
    quicklook : bool
        If True, also save a color-coded PNG next to the GeoTIFF.
    force : bool
        If True, overwrite an existing output file.

    Returns
    -------
    str
        Path of the produced (or pre-existing) GeoTIFF.
    """
    # get filenames
    out_filename = inp_filename.replace('_texture_features.npz', '_classified_%s.tif' % source)
    if os.path.exists(out_filename) and not force:
        print('Processed file %s already exists.' % out_filename)
        return out_filename
    # import classifier
    # NOTE: pickle.load is unsafe on untrusted input; classifier_filename is
    # expected to come from a trusted training step.
    # 'with' closes the file handle (the original passed open(...) directly
    # to pickle.load and leaked it).
    with open(classifier_filename, "rb") as clf_file:
        clf = pickle.load(clf_file)
    clf.n_jobs = threads
    # get texture features
    npz = np.load(inp_filename)
    texture_features = npz['textureFeatures'].item()  # hoisted: was evaluated twice
    features = np.vstack([
        texture_features['HH'],
        texture_features['HV'],
    ])
    imgSize = features.shape[1:]
    # 26 stacked feature planes (HH + HV) -> one row of 26 features per pixel
    features = features.reshape((26, np.prod(imgSize))).T
    # keep only pixels where every feature is finite
    gpi = np.isfinite(features.sum(axis=1))
    result = clf.predict(features[gpi, :])
    classImage = np.ones(np.prod(imgSize)) * 255  # 255 marks void / no-data pixels
    classImage[gpi] = result
    classImage = classImage.reshape(imgSize)
    img_shape = classImage.shape
    # open original file to get geometry
    raw_nansat = Nansat(raw_filename)
    # crop and resize original Nansat to match the ice map:
    # drop the remainder rows/cols so raw shape is an integer multiple
    # of the (coarser) ice-map shape, then shrink to the ice-map height
    raw_shape = raw_nansat.shape()
    crop = [rshape % ishape for (rshape, ishape) in zip(raw_shape, img_shape)]
    raw_nansat.crop(0, 0, raw_shape[1] - crop[1], raw_shape[0] - crop[0])
    raw_nansat.resize(height=img_shape[0])
    raw_nansat.reproject_gcps()
    # create new Nansat object and add ice map
    ice_map = Nansat.from_domain(domain=raw_nansat, array=classImage.astype(np.uint8))
    ice_map.set_metadata(raw_nansat.get_metadata())
    ice_map.set_metadata('entry_title', 'S1_SAR_ICE_MAP')
    ice_map = add_colortable(ice_map, source)
    ice_map.export(out_filename, bands=[1], driver='GTiff')
    if quicklook:
        rgb = colorcode_array(classImage, source)
        plt.imsave(out_filename.replace('.tif', '.png'), rgb)
    return out_filename
def test_crop(self):
    """Pixel-window crop: shape becomes (ySize, xSize), crop() echoes the
    requested extent, and band 1 is still readable as an ndarray."""
    cases = [
        ((10, 20, 50, 60), (60, 50)),
        ((0, 0, 200, 200), (200, 200)),
    ]
    for extent_args, expected_shape in cases:
        nansat_obj = Nansat(self.test_file_gcps, log_level=40,
                            mapper=self.default_mapper)
        returned_extent = nansat_obj.crop(*extent_args)
        self.assertEqual(nansat_obj.shape(), expected_shape)
        self.assertEqual(returned_extent, extent_args)
        self.assertEqual(type(nansat_obj[1]), np.ndarray)
def test_crop_no_gcps_arctic(self):
    """Pixel-window crop of the arctic test file (no GCPs)."""
    nansat_obj = Nansat(self.test_file_arctic, logLevel=40)
    extent = nansat_obj.crop(10, 20, 50, 60)
    # crop(xOff, yOff, xSize, ySize) -> shape is (ySize, xSize)
    self.assertEqual(nansat_obj.shape(), (60, 50))
    self.assertEqual(extent, (10, 20, 50, 60))
    self.assertEqual(type(nansat_obj[1]), np.ndarray)
def test_crop_lonlat_lims(self):
    """Crop by lon/lat limits; a zero status means the crop succeeded."""
    nansat_obj = Nansat(self.test_file_gcps, logLevel=40)
    status, extent = nansat_obj.crop(lonlim=[28, 29], latlim=[70.5, 71])
    self.assertEqual(status, 0)
    # expected pixel window derived from the test file's geolocation
    self.assertEqual(nansat_obj.shape(), (111, 110))
    self.assertEqual(extent, (31, 89, 110, 111))
    self.assertEqual(type(nansat_obj[1]), np.ndarray)
def test_crop_gcpproj(self):
    """Cropping after reproject_gcps must keep the stereographic GCP
    projection and metric (not degree) GCP coordinates."""
    nansat_obj = Nansat(self.test_file_gcps, log_level=40,
                        mapper=self.default_mapper)
    nansat_obj.reproject_gcps()
    nansat_obj.crop(10, 20, 50, 60)
    gcp_x_values = np.array(
        [gcp.GCPX for gcp in nansat_obj.vrt.dataset.GetGCPs()])
    median_abs_x = abs(np.median(gcp_x_values))
    proj4_string = NSR(
        nansat_obj.vrt.dataset.GetGCPProjection()).ExportToProj4()
    projection_token = proj4_string.split(' ')[0]
    # coordinates > 360 cannot be degrees, so they must be projected meters
    self.assertTrue(median_abs_x > 360)
    self.assertTrue(projection_token == '+proj=stere')
def test_crop_gcpproj(self):
    # NOTE(review): this redefines test_crop_gcpproj — an earlier definition
    # with the same name exists in this file, and in a single class body the
    # later definition silently shadows the earlier one, so only one of them
    # runs. This copy also uses the older camelCase API (logLevel,
    # reproject_GCPs) while sibling tests use snake_case — confirm which
    # nansat API version this file targets before merging the duplicates.
    n1 = Nansat(self.test_file_gcps, logLevel=40)
    n1.reproject_GCPs()
    ext = n1.crop(10, 20, 50, 60)
    # median |GCP X| after reprojection; values > 360 imply projected
    # meters rather than degrees
    xmed = abs(np.median(np.array([gcp.GCPX for gcp in n1.vrt.dataset.GetGCPs()])))
    # first token of the PROJ4 string identifies the projection
    gcpproj = NSR(n1.vrt.dataset.GetGCPProjection()
                  ).ExportToProj4().split(' ')[0]
    self.assertTrue(xmed > 360)
    self.assertTrue(gcpproj=='+proj=stere')
# Train k-means (9 clusters) on the scaled, PCA-reduced texture features.
kmeans = KMeans(n_clusters=9, n_jobs=cfg.numberOfThreads).fit(
    pca.transform(scaler.transform(features_all))[:, :n_components])
# Persist the whole preprocessing + clustering pipeline.
# 'with' closes the file handle (the original passed open(...) directly
# to pickle.dump and leaked it).
with open(cfg.kmeansFilename, "wb") as kmeans_file:
    pickle.dump([scaler, pca, n_components, kmeans], kmeans_file)

# apply clustering
print('*** Exporting files to:')
for li, ifile in enumerate(ifiles):
    ofile = ifile.replace('_texture_features.npz', '_kmeans_clustered.tif')
    # Report the file actually written (the original printed a
    # '_kmeans.tif' name that is never created).
    print('[%d/%d] %s' % (li + 1, len(ifiles), ofile))
    npz = np.load(ifile)
    textureFeatures = npz['textureFeatures'].item()  # hoisted: was read twice
    tfsHH = textureFeatures['HH']
    tfsHV = textureFeatures['HV']
    imgSize = tfsHH.shape[1:]
    # 26 stacked feature planes (HH + HV) -> one row of 26 features per pixel
    features = np.vstack([tfsHH, tfsHV]).reshape(26, np.prod(imgSize)).T
    gpi = np.isfinite(features.sum(axis=1))
    kmeansZones = np.zeros(np.prod(imgSize))  # 0 is reserved for void cells
    kmeansZones[gpi] = 1 + kmeans.predict(
        pca.transform(scaler.transform(features[gpi]))[:, :n_components])
    kmeansZones = kmeansZones.reshape(imgSize)
    # sigma0 product provides the geometry for the exported cluster map
    nansatObjSigma0 = Nansat(
        ifile.replace('_texture_features.npz', '_sigma0_HH_denoised.tif'))
    if nansatObjSigma0.shape() != imgSize:
        nansatObjSigma0.crop(0, 0, imgSize[1], imgSize[0])
    nansatObjCluster = Nansat.from_domain(array=kmeansZones.astype(np.uint8),
                                          domain=nansatObjSigma0)
    nansatObjCluster.export(ofile, bands=[1], driver='GTiff')
    if cfg.quicklook:
        plt.imsave(ofile.replace('.tif', '.png'), kmeansZones, cmap='tab10')
# Fit scaler -> PCA -> k-means on the pooled texture features.
pca = PCA(n_components=n_components).fit(scaler.transform(features_all))
kmeans = KMeans(n_clusters=n_clusters, n_jobs=cfg.numberOfThreads).fit(
    pca.transform(scaler.transform(features_all)))
# Persist the pipeline; 'with' closes the file handle (the original passed
# open(...) directly to pickle.dump and leaked it).
with open(cfg.kmeansFilename, "wb") as kmeans_file:
    pickle.dump([scaler, pca, kmeans], kmeans_file)

# apply clustering
print('*** Exporting files to:')
for li, ifile in enumerate(ifiles):
    ofile = ifile.replace('_texture_features.npz', '_kmeans_clustered.tif')
    # Report the file actually written (the original printed a
    # '_kmeans.tif' name that is never created).
    print('[%d/%d] %s' % (li + 1, len(ifiles), ofile))
    npz = np.load(ifile)
    textureFeatures = npz['textureFeatures'].item()  # hoisted: was read twice
    tfsHH = textureFeatures['HH']
    tfsHV = textureFeatures['HV']
    incAng = npz['incidenceAngle'][np.newaxis, :, :]
    imgSize = tfsHH.shape[1:]
    # 27 planes: the 26 HH+HV texture features plus the incidence angle
    features = np.vstack([tfsHH, tfsHV, incAng]).reshape(27, np.prod(imgSize)).T
    gpi = np.isfinite(features.sum(axis=1))
    kmeansZones = np.ones(np.prod(imgSize)) * np.nan  # NaN marks void cells
    kmeansZones[gpi] = kmeans.predict(
        pca.transform(scaler.transform(features[gpi])))
    kmeansZones = kmeansZones.reshape(imgSize)
    # gamma0 product provides the geometry for the exported cluster map
    nansatObjGamma0 = Nansat(
        ifile.replace('_texture_features.npz', '_denoised_gamma0_HH.tif'))
    if nansatObjGamma0.shape() != imgSize:
        nansatObjGamma0.crop(0, 0, imgSize[1], imgSize[0])
    # NOTE(review): older Nansat constructor signature (array=/domain=);
    # newer code in this project uses Nansat.from_domain — confirm which
    # nansat version this script targets.
    nansatObjCluster = Nansat(array=kmeansZones, domain=nansatObjGamma0)
    nansatObjCluster.export(ofile, bands=[1], driver='GTiff')
    plt.imsave(ofile.replace('.tif', '.png'), kmeansZones)