def test_export_gcps_complex_to_netcdf(self):
    """ Should export file with GCPs and write correct complex bands"""
    n0 = Nansat(self.test_file_gcps, logLevel=40)
    b0 = n0['L_469']

    # Build a new object on the same domain carrying a complex copy of the band
    n1 = Nansat(domain=n0)
    n1.add_band(b0.astype('complex64'), parameters={'name': 'L_469'})

    tmpfilename = os.path.join(ntd.tmp_data_path,
                               'nansat_export_gcps_complex.nc')
    n1.export(tmpfilename)

    # The exported NetCDF must carry the GCP variables
    ncf = netcdf_file(tmpfilename)
    self.assertTrue(os.path.exists(tmpfilename))
    self.assertTrue('GCPX' in ncf.variables)
    self.assertTrue('GCPY' in ncf.variables)
    self.assertTrue('GCPPixel' in ncf.variables)
    self.assertTrue('GCPLine' in ncf.variables)

    # Band data must survive the export/import roundtrip
    n2 = Nansat(tmpfilename)
    b2 = n2['L_469']
    np.testing.assert_allclose(b0, b2)

    # BUGFIX: compare geolocation of the re-imported file (n2), not of the
    # in-memory object (n1) -- n1 was built from n0's domain, so reading its
    # grids back made the assertion trivially true
    lon0, lat0 = n0.get_geolocation_grids()
    lon2, lat2 = n2.get_geolocation_grids()
    np.testing.assert_allclose(lon0, lon2)
    np.testing.assert_allclose(lat0, lat2)
def test_export_netcdf_arctic_hardcopy(self):
    """Exported hardcopy must contain the same band data as the source."""
    source = Nansat(self.test_file_arctic, mapper=self.default_mapper)
    source.export(self.tmp_filename, hardcopy=True)
    roundtrip = Nansat(self.tmp_filename, mapper=self.default_mapper)
    # NOTE(review): .any() only proves at least one pixel matches;
    # .all() would be stronger -- confirm data has no NaN before tightening
    for band in (1, 2, 3):
        self.assertTrue((source[band] == roundtrip[band]).any())
def make_reproject(path, final_path, file_name, show='off'):
    """Reproject an image according to the parameters set in ``dom``.

    After reprojection a mask is generated and added to the image file.

    :param path: Path to the folder containing the source file
    :param final_path: Path to the folder where the reprojected file is put
    :param file_name: Name of the (source) file
    :param show: Flag for plotting band [2]. Default is 'off';
        pass show='on' to enable plotting
    :return: Reprojected file with the original file_name
    """
    print path + file_name
    nansat_obj = Nansat(path + file_name)
    # For a small final subset
    #dom = Domain('+proj=latlong +datum=WGS84 +ellps=WGS84 +no_defs', '-lle -86.20 45.10 -86.10 45.20 -ts 300 300')
    # For the whole region
    dom = Domain('+proj=latlong +datum=WGS84 +ellps=WGS84 +no_defs',
                 '-lle -86.3 44.6 -85.2 45.3 -ts 300 200')
    nansat_obj.reproject(dom)
    nansat_obj = create_mask(nansat_obj)
    if show == 'on':
        plt.imshow(nansat_obj[2])
        plt.colorbar()
        plt.show()
    nansat_obj.export(final_path + file_name + '.reproject.nc')
def test_export_add_geoloc(self, mock_add_geolocation):
    """Deprecated addGeoloc= must warn but still trigger add_geolocation."""
    nobj = Nansat(self.test_file_arctic, mapper=self.default_mapper)
    with warnings.catch_warnings(record=True) as caught:
        nobj.export(self.tmp_filename, addGeoloc=True)
    self.assertEqual(caught[0].category, NansatFutureWarning)
    self.assertTrue(mock_add_geolocation.called)
def test_export_netcdf_complex_remove_meta(self):
    """rm_metadata= must drop the named key from the exported file."""
    src = Nansat(self.test_file_complex, mapper=self.default_mapper)
    self.assertEqual(src.get_metadata('PRODUCT_TYPE'), 'SLC')
    src.export(self.tmp_filename, rm_metadata=['PRODUCT_TYPE'])
    result = Nansat(self.tmp_filename, mapper=self.default_mapper)
    with self.assertRaises(ValueError):
        result.get_metadata('PRODUCT_TYPE')
    self.assertTrue((src[1] == result[1]).any())
def test_export_gtiff(self):
    """export(driver='GTiff') must create the target file on disk."""
    src = Nansat(self.test_file_gcps, log_level=40, mapper=self.default_mapper)
    out_path = os.path.join(self.tmp_data_path, 'nansat_export.tif')
    src.export(out_path, driver='GTiff')
    self.assertTrue(os.path.exists(out_path))
def test_export_add_gcps(self):
    """Deprecated addGCPs/bottomup kwargs must each raise NansatFutureWarning."""
    nobj = Nansat(self.test_file_arctic, mapper=self.default_mapper)
    with warnings.catch_warnings(record=True) as caught:
        nobj.export(self.tmp_filename, addGCPs=True, bottomup=True)
    for idx in (0, 1):
        self.assertEqual(caught[idx].category, NansatFutureWarning)
def test_export_band_by_name(self):
    """Exporting a single band by name must yield a one-band file."""
    src = Nansat(self.test_file_gcps, logLevel=40)
    out_path = os.path.join(ntd.tmp_data_path, 'nansat_export_band.tif')
    src.export(out_path, bands=['L_645'], driver='GTiff')
    reloaded = Nansat(out_path, mapperName='generic')
    self.assertTrue(os.path.exists(out_path))
    self.assertEqual(reloaded.vrt.dataset.RasterCount, 1)
def boreali_processing(obj, final_path):
    # Retrieve chl/tsm/doc/mse with the Boreali model for deep water and
    # write the products plus quick-look figures next to `final_path`.
    # Wavelengths [nm] of the Rrs bands fed to the retrieval
    wavelen = [412, 443, 469, 488, 531, 547, 555, 645, 667, 678]
    # min/max limits for the retrieved concentrations (Boreali convention;
    # presumably chl, tsm, doc pairs plus a threshold -- TODO confirm)
    cpa_limits = [0.01, 2, 0.01, 1, 0.01, 1, 10]
    b = Boreali('michigan', wavelen)
    n = Nansat(obj)
    dom = Domain('+proj=latlong +datum=WGS84 +ellps=WGS84 +no_defs',
                 '-lle -86.3 44.6 -85.2 45.3 -ts 300 200')
    n.reproject(dom)
    # Zero solar zenith angle everywhere, same shape as a reprojected band
    theta = numpy.zeros_like(n[2])
    custom_n = Nansat(domain=n)
    # Band numbers of the Rrs_<wavelength> bands in the source object
    band_rrs_numbers = list(map(lambda x: n._get_band_number('Rrs_' + str(x)),
                                wavelen))
    for index in range(0, len(wavelen)):
        # Convert Rrs to Rrsw (below-surface remote sensing reflectance)
        rrsw = n[band_rrs_numbers[index]] / (0.52 + 1.7 * n[band_rrs_numbers[index]])
        custom_n.add_band(rrsw, parameters={'name': 'Rrsw_' + str(wavelen[index]),
                                            'units': 'sr-1',
                                            'wavelength': wavelen[index]})
    custom_n = create_mask(custom_n)
    cpa = b.process(custom_n, cpa_limits, mask=custom_n['mask'],
                    theta=theta, threads=4)
    # Store the retrieved products as named bands
    custom_n.add_band(array=cpa[0], parameters={'name': 'chl',
                                                'long_name': 'Chlorophyl-a',
                                                'units': 'mg m-3'})
    custom_n.add_band(array=cpa[1], parameters={'name': 'tsm',
                                                'long_name': 'Total suspended matter',
                                                'units': 'g m-3'})
    custom_n.add_band(array=cpa[2], parameters={'name': 'doc',
                                                'long_name': 'Dissolved organic carbon',
                                                'units': 'gC m-3'})
    custom_n.add_band(array=cpa[3], parameters={'name': 'mse',
                                                'long_name': 'Root Mean Square Error',
                                                'units': 'sr-1'})
    custom_n.add_band(array=cpa[4], parameters={'name': 'mask',
                                                'long_name': 'L2 Boreali mask',
                                                'units': '1'})
    custom_n.export(final_path + obj.split('/')[-1] + 'cpa_deep.nc')
    # Shared figure options; mask values 1/2/4 are colored per mask_lut
    fig_params = {'legend': True,
                  'LEGEND_HEIGHT': 0.5,
                  'NAME_LOCATION_Y': 0,
                  'mask_array': cpa[4],
                  'mask_lut': {1: [255, 255, 255],
                               2: [128, 128, 128],
                               4: [200, 200, 255]}}
    custom_n.write_figure(final_path + obj.split('/')[-1] + 'chl_deep.png',
                          'chl', clim=[0, 1.], **fig_params)
    custom_n.write_figure(final_path + obj.split('/')[-1] + 'tsm_deep.png',
                          'tsm', clim=[0, 1.], **fig_params)
    custom_n.write_figure(final_path + obj.split('/')[-1] + 'doc_deep.png',
                          'doc', clim=[0, .2], **fig_params)
    custom_n.write_figure(final_path + obj.split('/')[-1] + 'mse_deep.png',
                          'mse', clim=[1e-5, 1e-2], logarithm=True, **fig_params)
    # RGB quick-look from the original bands, gray where mask value is 2
    n.write_figure(final_path + obj.split('/')[-1] + 'rgb_deep.png',
                   [16, 14, 6],
                   clim=[[0, 0, 0], [0.006, 0.04, 0.024]],
                   mask_array=cpa[4],
                   mask_lut={2: [128, 128, 128]})
def test_special_characters_in_exported_metadata(self):
    """JSON stored as metadata must survive export (entities unescaped)."""
    src = Nansat(self.test_file_gcps, mapper=self.default_mapper)
    payload = json.dumps({'meta1': 'hei', 'meta2': 'derr'})
    src.vrt.dataset.SetMetadataItem('jsonstring', payload)
    src.export(self.tmp_filename)
    reloaded = Nansat(self.tmp_filename, mapper=self.default_mapper)
    raw = unescape(reloaded.get_metadata('jsonstring'), {'&quot;': '"'})
    self.assertIsInstance(json.loads(raw), dict)
def test_export_band_by_name(self):
    """Exporting one band selected by name must yield a one-band GTiff."""
    src = Nansat(self.test_file_gcps, log_level=40, mapper=self.default_mapper)
    out_path = os.path.join(self.tmp_data_path, 'nansat_export_band.tif')
    src.export(out_path, bands=['L_645'], driver='GTiff')
    reloaded = Nansat(out_path, mapper=self.default_mapper)
    self.assertTrue(os.path.exists(out_path))
    self.assertEqual(reloaded.vrt.dataset.RasterCount, 1)
def test_export_band(self):
    """Exporting band 1 to GTiff must produce a single-band file."""
    src = Nansat(self.test_file_gcps, logLevel=40)
    out_path = os.path.join(ntd.tmp_data_path, 'nansat_export_band.tif')
    src.export(out_path, bands=[1], driver='GTiff')
    reloaded = Nansat(out_path, mapperName='generic')
    self.assertTrue(os.path.exists(out_path))
    self.assertEqual(reloaded.vrt.dataset.RasterCount, 1)
def test_export_gcps_filename_warning(self):
    """Deprecated fileName= kwarg must raise exactly one warning."""
    src = Nansat(self.test_file_gcps, log_level=40, mapper=self.default_mapper)
    out_path = os.path.join(self.tmp_data_path, 'temp.nc')
    with warnings.catch_warnings(record=True) as caught:
        src.export(fileName=out_path)
    self.assertEqual(len(caught), 1)
    self.assertIn('Nansat.export(fileName', str(caught[0].message))
def test_geolocation_of_exportedNC_vs_original(self):
    """Lon/lat in original and exported file should coincide."""
    src = Nansat(self.test_file_gcps, mapper=self.default_mapper)
    src.export(self.tmp_filename)
    reloaded = Nansat(self.tmp_filename, mapper=self.default_mapper)
    # Compare lon grid with lon grid, lat grid with lat grid
    for src_grid, new_grid in zip(src.get_geolocation_grids(),
                                  reloaded.get_geolocation_grids()):
        np.testing.assert_allclose(src_grid, new_grid)
def test_export_netcdf_arctic(self):
    """Test export of the arctic data without GCPS."""
    src = Nansat(self.test_file_arctic)
    src.export(self.tmpfilename)
    reloaded = Nansat(self.tmpfilename)
    for band in (1, 2, 3):
        self.assertTrue((src[band] == reloaded[band]).any())
    os.unlink(self.tmpfilename)
def geolocation_of_exportedNC_vs_original(self, file):
    """Lon/lat grids must be identical before and after a NetCDF roundtrip.

    :param file: path of the dataset to export and re-import
    """
    orig = Nansat(file)
    testFile = 'test.nc'
    orig.export(testFile)
    copy = Nansat(testFile)
    lon0, lat0 = orig.get_geolocation_grids()
    lon1, lat1 = copy.get_geolocation_grids()
    np.testing.assert_allclose(lon0, lon1)
    np.testing.assert_allclose(lat0, lat1)
    # BUGFIX: 'ncfile' was undefined here (NameError on cleanup);
    # remove the file this helper actually created
    os.unlink(testFile)
def geolocation_of_exportedNC_vs_original(self, file):
    """Lon/lat grids must be identical before and after a NetCDF roundtrip.

    :param file: path of the dataset to export and re-import
    """
    orig = Nansat(file)
    testFile = "test.nc"
    orig.export(testFile)
    copy = Nansat(testFile)
    lon0, lat0 = orig.get_geolocation_grids()
    lon1, lat1 = copy.get_geolocation_grids()
    np.testing.assert_allclose(lon0, lon1)
    np.testing.assert_allclose(lat0, lat1)
    # BUGFIX: 'ncfile' was undefined here (NameError on cleanup);
    # remove the file this helper actually created
    os.unlink(testFile)
def test_geolocation_of_exportedNC_vs_original(self):
    """Lon/lat in original and exported file should coincide."""
    src = Nansat(self.test_file_gcps)
    out_path = os.path.join(ntd.tmp_data_path, 'nansat_export_gcps.nc')
    src.export(out_path)
    reloaded = Nansat(out_path)
    for src_grid, new_grid in zip(src.get_geolocation_grids(),
                                  reloaded.get_geolocation_grids()):
        np.testing.assert_allclose(src_grid, new_grid)
def test_time_coverage_metadata_of_exported_equals_original(self):
    """time_coverage_* metadata must survive an export roundtrip."""
    src = Nansat(self.test_file_gcps, mapper=self.default_mapper)
    src.set_metadata('time_coverage_start', '2010-01-02T08:49:02.347809')
    src.set_metadata('time_coverage_end', '2010-01-02T08:50:03.599373')
    src.export(self.tmp_filename)
    reloaded = Nansat(self.tmp_filename, mapper=self.default_mapper)
    for key in ('time_coverage_start', 'time_coverage_end'):
        self.assertEqual(src.get_metadata(key), reloaded.get_metadata(key))
def test_export_netcdf_complex_remove_meta(self):
    """Test export of complex data with pixelfunctions."""
    src = Nansat(self.test_file_complex)
    self.assertEqual(src.get_metadata('PRODUCT_TYPE'), 'SLC')
    src.export(self.tmpfilename, rmMetadata=['PRODUCT_TYPE'])
    reloaded = Nansat(self.tmpfilename)
    # The removed key must be gone from the exported copy
    with self.assertRaises(OptionError):
        reloaded.get_metadata('PRODUCT_TYPE')
    self.assertTrue((src[1] == reloaded[1]).any())
    os.unlink(self.tmpfilename)
def test_reproject_and_export_band(self):
    """A reprojected dataset must export the selected band only."""
    gcps_obj = Nansat(self.test_file_gcps, logLevel=40)
    stere_obj = Nansat(self.test_file_stere, logLevel=40)
    gcps_obj.reproject(stere_obj)
    out_path = os.path.join(ntd.tmp_data_path,
                            'nansat_reproject_export_band.nc')
    gcps_obj.export(out_path, bands=[1])
    reloaded = Nansat(out_path, mapperName='generic')
    self.assertTrue(os.path.exists(out_path))
    self.assertEqual(reloaded.vrt.dataset.RasterCount, 1)
def test_geolocation_of_exportedNC_vs_original(self):
    """Lon/lat in original and exported file should coincide."""
    src = Nansat(self.test_file_gcps)
    src.export(self.tmpfilename)
    reloaded = Nansat(self.tmpfilename)
    for src_grid, new_grid in zip(src.get_geolocation_grids(),
                                  reloaded.get_geolocation_grids()):
        np.testing.assert_allclose(src_grid, new_grid)
    os.unlink(self.tmpfilename)
def test_export_band(self):
    """Exporting band 1 must produce a single-band GTiff."""
    src = Nansat(self.test_file_gcps, log_level=40, mapper=self.default_mapper)
    out_path = os.path.join(self.tmp_data_path, 'nansat_export_band.tif')
    src.export(out_path, bands=[1], driver='GTiff')
    reloaded = Nansat(out_path, mapper=self.default_mapper)
    self.assertTrue(os.path.exists(out_path))
    self.assertEqual(reloaded.vrt.dataset.RasterCount, 1)
def test_export_netcdf_complex_remove_meta(self):
    """Deprecated rmMetadata= must warn but still strip the metadata."""
    src = Nansat(self.test_file_complex, mapper=self.default_mapper)
    self.assertEqual(src.get_metadata('PRODUCT_TYPE'), 'SLC')
    with warnings.catch_warnings(record=True) as caught:
        src.export(self.tmp_filename, rmMetadata=['PRODUCT_TYPE'])
    self.assertEqual(caught[0].category, NansatFutureWarning)
    reloaded = Nansat(self.tmp_filename, mapper=self.default_mapper)
    with self.assertRaises(ValueError):
        reloaded.get_metadata('PRODUCT_TYPE')
    self.assertTrue((src[1] == reloaded[1]).any())
def test_special_characters_in_exported_metadata(self):
    """Escaped JSON metadata must parse back to a dict after export."""
    src = Nansat(self.test_file_gcps, mapper=self.default_mapper)
    src.vrt.dataset.SetMetadataItem(
        'jsonstring', json.dumps({'meta1': 'hei', 'meta2': 'derr'}))
    src.export(self.tmp_filename)
    reloaded = Nansat(self.tmp_filename, mapper=self.default_mapper)
    decoded = json.loads(unescape(reloaded.get_metadata('jsonstring'),
                                  {'&quot;': '"'}))
    self.assertIsInstance(decoded, dict)
def test_export_selected_bands(self):
    """Exporting by band number must include exactly the chosen bands."""
    src = Nansat(self.test_file_gcps)
    out_path = 'tmp.nc'
    extra = np.random.randn(src.shape()[0], src.shape()[1])
    src.add_band(extra, {'name': 'newBand'})
    # Select bands by their numeric indices
    src.export(out_path, bands=[4, 2])
    self.assertTrue(os.path.exists(out_path))
    reloaded = Nansat(out_path)
    self.assertTrue(reloaded.has_band('newBand'))
    self.assertTrue(reloaded.has_band('L_555'))
    os.unlink(out_path)
def test_export_option(self):
    """options= must be accepted both as a string and as a list."""
    src = Nansat(self.test_file_arctic)
    out_path = os.path.join(ntd.tmp_data_path, 'nansat_export_option.nc')
    # Same creation option passed in the two supported forms
    src.export(out_path, options='WRITE_LONLAT=YES')
    src.export(out_path + '2', options=['WRITE_LONLAT=YES'])
    for reloaded in (Nansat(out_path), Nansat(out_path + '2')):
        self.assertTrue(reloaded.has_band('lon'))
        self.assertTrue(reloaded.has_band('lat'))
        self.assertTrue(reloaded.has_band('Bristol'))
def test_export_netcdf(self):
    """ Test export and following import of data with bands containing
        np.nan values """
    src = Nansat(self.test_file_gcps, mapper=self.default_mapper)
    rows, cols = src.shape()
    arr_clean = np.random.randn(rows, cols)
    src.add_band(arr_clean, {'name': 'testBandNoNaN'})
    # Punch a 20x20 NaN hole in the middle of a copy
    arr_nan = arr_clean.copy()
    mid_r, mid_c = int(rows / 2.), int(cols / 2.)
    arr_nan[mid_r - 10:mid_r + 10, mid_c - 10:mid_c + 10] = np.nan
    src.add_band(arr_nan, {'name': 'testBandWithNaN'})
    src.export(self.tmp_filename)
    reloaded = Nansat(self.tmp_filename, mapper=self.default_mapper)
    # allclose tolerates roundoff introduced by the file format
    self.assertTrue(np.allclose(arr_clean, reloaded['testBandNoNaN']))
    np.testing.assert_allclose(arr_nan, reloaded['testBandWithNaN'])
def test_export_netcdf(self):
    ''' Test export and following import of data with bands containing
    np.nan values '''
    n = Nansat(self.test_file_gcps)
    arrNoNaN = np.random.randn(n.shape()[0], n.shape()[1])
    n.add_band(arrNoNaN, {'name': 'testBandNoNaN'})
    arrWithNaN = arrNoNaN.copy()
    # BUGFIX: use floor division -- '/' yields floats under Python 3 and
    # float slice indices raise TypeError (cf. the int()-wrapped sibling test)
    arrWithNaN[n.shape()[0] // 2 - 10:n.shape()[0] // 2 + 10,
               n.shape()[1] // 2 - 10:n.shape()[1] // 2 + 10] = np.nan
    n.add_band(arrWithNaN, {'name': 'testBandWithNaN'})
    n.export(self.tmpfilename)
    exported = Nansat(self.tmpfilename)
    earrNoNaN = exported['testBandNoNaN']
    # Use allclose to allow some roundoff errors
    self.assertTrue(np.allclose(arrNoNaN, earrNoNaN))
    earrWithNaN = exported['testBandWithNaN']
    np.testing.assert_allclose(arrWithNaN, earrWithNaN)
    os.unlink(self.tmpfilename)
def test_export_gcps_to_netcdf(self):
    """Should export file with GCPs and write correct bands."""
    src = Nansat(self.test_file_gcps, logLevel=40)
    out_path = os.path.join(ntd.tmp_data_path, 'nansat_export_gcps.nc')
    src.export(out_path)
    ncf = netcdf_file(out_path)
    self.assertTrue(os.path.exists(out_path))
    # GCP arrays must be present as NetCDF variables
    for gcp_var in ('GCPX', 'GCPY', 'GCPPixel', 'GCPLine'):
        self.assertTrue(gcp_var in ncf.variables)
    reloaded = Nansat(out_path)
    np.testing.assert_allclose(src['L_469'], reloaded['L_469'])
    for src_grid, new_grid in zip(src.get_geolocation_grids(),
                                  reloaded.get_geolocation_grids()):
        np.testing.assert_allclose(src_grid, new_grid)
n.write_map(oFileName + 'map.png') # Write indexed picture with data from the first band n.write_figure(oFileName + '.png', clim='hist') # Reproject input image onto map of Norwegian Coast # 1. Create domain describing the desired map # 2. Transform the original satellite image # 3. Write the transfromed image into RGB picture dLatlong = Domain("+proj=latlong +datum=WGS84 +ellps=WGS84 +no_defs", "-te 27 70.2 31 71.5 -ts 500 500") n.reproject(dLatlong) n.write_figure(oFileName + 'pro.png', bands=[1, 2, 3], clim=[0, 100]) # Export projected satelite image into NetCDF format n.export(oFileName + '.nc') # Collect values from interactively drawn transect # 1. draw transect interactively # 2. plot the values values, lonlat, pixlinCoord = n.get_transect() plt.plot(lonlat[0], values[0], '.-'); plt.show() # run tests of other nansat components import test_domain import test_nansat import test_figure import test_nansatmap import test_nansatshape import test_mosaic import test_pointbrowser
def boreali_osw_processing(obj, final_path):
    """
    The code in this function is based on tutorial.py found in the
    boreali repository.
    :param obj: path to the image
    :param final_path: path for saving the output files
    :return:
    """
    # Wavelengths [nm] of the Rrs bands fed to the retrieval
    wavelen = [412, 443, 469, 488, 531, 547, 555, 645, 667, 678]
    cpa_limits = [0.01, 2, 0.01, 1, 0.01, 1, 10]
    h = get_deph()  # Depth of the study area from bathymetry
    b = Boreali('michigan', wavelen)
    n = Nansat(obj)
    dom = Domain('+proj=latlong +datum=WGS84 +ellps=WGS84 +no_defs',
                 '-lle -86.3 44.6 -85.2 45.3 -ts 300 200')
    n.reproject(dom)
    theta = numpy.zeros_like(n[2])
    custom_n = Nansat(domain=n)
    # List of the band numbers that hold the Rrs values
    band_rrs_numbers = list(map(lambda x: n._get_band_number('Rrs_' + str(x)),
                                wavelen))
    # For correct processing, store both Rrs and Rrsw values in custom_n
    for index in range(0, len(wavelen)):
        # Convert Rrs to Rrsw
        rrsw = n[band_rrs_numbers[index]] / (0.52 + 1.7 * n[band_rrs_numbers[index]])
        # Store Rrsw in the new object
        custom_n.add_band(rrsw, parameters={'name': 'Rrsw_' + str(wavelen[index]),
                                            'units': 'sr-1',
                                            'wavelength': wavelen[index]})
        # Store the Rrs values in the new object
        custom_n.add_band(n[band_rrs_numbers[index]],
                          parameters={'name': 'Rrs_' + str(wavelen[index]),
                                      'units': 'sr-1',
                                      'wavelength': wavelen[index]})
    custom_n = create_mask(custom_n)
    # Shallow-water (OSW) processing: pass bathymetry depth to the model
    cpa = b.process(custom_n, cpa_limits, mask=custom_n['mask'], depth=h,
                    theta=theta, threads=4)
    custom_n.add_band(array=cpa[0], parameters={'name': 'chl',
                                                'long_name': 'Chlorophyl-a',
                                                'units': 'mg m-3'})
    custom_n.add_band(array=cpa[1], parameters={'name': 'tsm',
                                                'long_name': 'Total suspended matter',
                                                'units': 'g m-3'})
    custom_n.add_band(array=cpa[2], parameters={'name': 'doc',
                                                'long_name': 'Dissolved organic carbon',
                                                'units': 'gC m-3'})
    custom_n.add_band(array=cpa[3], parameters={'name': 'mse',
                                                'long_name': 'Root Mean Square Error',
                                                'units': 'sr-1'})
    custom_n.add_band(array=cpa[4], parameters={'name': 'mask',
                                                'long_name': 'L2 Boreali mask',
                                                'units': '1'})
    custom_n.export(final_path + obj.split('/')[-1] + 'cpa_OSW.nc')
    # Shared figure options; mask values 1/2/4 are colored per mask_lut
    fig_params = {'legend': True,
                  'LEGEND_HEIGHT': 0.5,
                  'NAME_LOCATION_Y': 0,
                  'mask_array': cpa[4],
                  'mask_lut': {1: [255, 255, 255],
                               2: [128, 128, 128],
                               4: [200, 200, 255]}}
    custom_n.write_figure(final_path + obj.split('/')[-1] + 'chl_OSW.png',
                          'chl', clim=[0, 1.], **fig_params)
    custom_n.write_figure(final_path + obj.split('/')[-1] + 'tsm_OSW.png',
                          'tsm', clim=[0, 1.], **fig_params)
    custom_n.write_figure(final_path + obj.split('/')[-1] + 'doc_OSW.png',
                          'doc', clim=[0, .2], **fig_params)
    custom_n.write_figure(final_path + obj.split('/')[-1] + 'mse_OSW.png',
                          'mse', clim=[1e-5, 1e-2], logarithm=True, **fig_params)
    # RGB quick-look from the original bands, gray where mask value is 2
    n.write_figure(final_path + obj.split('/')[-1] + 'rgb_OSW.png',
                   [16, 14, 6],
                   clim=[[0, 0, 0], [0.006, 0.04, 0.024]],
                   mask_array=cpa[4],
                   mask_lut={2: [128, 128, 128]})
# Fit PCA + KMeans on the pooled texture features, then classify each scene.
pca = PCA(n_components=n_components).fit(scaler.transform(features_all))
kmeans = KMeans(n_clusters=n_clusters, n_jobs=cfg.numberOfThreads).fit(
    pca.transform(scaler.transform(features_all)))
# Persist the fitted transforms so classification can be re-run without re-training
pickle.dump([scaler, pca, kmeans], open(cfg.kmeansFilename, "wb"))

# apply clustering
print('*** Exporting files to:')
for li, ifile in enumerate(ifiles):
    ofile = ifile.replace('_texture_features.npz', '_kmeans_clustered.tif')
    print('[%d/%d] %s' % (li + 1, len(ifiles),
                          ifile.replace('_texture_features.npz', '_kmeans.tif')))
    npz = np.load(ifile)
    # Texture features per polarisation; stored as a 0-d object array
    tfsHH = npz['textureFeatures'].item()['HH']
    tfsHV = npz['textureFeatures'].item()['HV']
    incAng = npz['incidenceAngle'][np.newaxis, :, :]
    imgSize = tfsHH.shape[1:]
    # One row per pixel, 27 feature columns
    # (assumes 13 HH + 13 HV features + incidence angle -- TODO confirm)
    features = np.vstack([tfsHH, tfsHV, incAng]).reshape(27, np.prod(imgSize)).T
    # Keep only pixels where every feature is finite
    gpi = np.isfinite(features.sum(axis=1))
    kmeansZones = np.ones(np.prod(imgSize)) * np.nan
    kmeansZones[gpi] = kmeans.predict(
        pca.transform(scaler.transform(features[gpi])))
    kmeansZones = kmeansZones.reshape(imgSize)
    # Borrow geolocation from the matching denoised gamma0 product
    nansatObjGamma0 = Nansat(
        ifile.replace('_texture_features.npz', '_denoised_gamma0_HH.tif'))
    if nansatObjGamma0.shape() != imgSize:
        nansatObjGamma0.crop(0, 0, imgSize[1], imgSize[0])
    nansatObjCluster = Nansat(array=kmeansZones, domain=nansatObjGamma0)
    nansatObjCluster.export(ofile, bands=[1], driver='GTiff')
    plt.imsave(ofile.replace('.tif', '.png'), kmeansZones)
# Resize the data to 50% using CubicSpline n.resize_lite(0.5, eResampleAlg=3) # make simple indexed image from 1st band with default colormap n.write_figure(oFileName + '02CubicSpline.png', clim='hist') # undo resize n.resize() # make image with map of the file location n.write_map(oFileName + '04_map.png') # Writes an 8-bit GeoTiff image for a given band n.write_geotiffimage(oFileName + '05_geotiff.tif', bandID=1) # create a NetCDF file with all bands n.export(oFileName + '06a.nc') n.export(oFileName + '06b.nc', bottomup=True) # create a GTiff file with one band (default driver is NetCDF) n.export_band(oFileName + '07.tif', bandID=1, driver='GTiff') # get array with watermask (landmask) # -- Get Nansat object with watermask wm = n.watermask()[1] # -- Reproject with cubic interpolation d = Domain(4326, "-te 27 70.3 31 71.5 -ts 300 300") n.reproject(d, 2) # -- Write image n.write_figure(oFileName + '08_pro.png', clim='hist')
def process_boreali(self, opts):
    '''Advanced processing of MODIS images:
    retrieve chl, tsm, doc with boreali
    generate images

    Returns 0 on success, 1 when reprojection fails or Rrsw_412 is missing.
    '''
    # Per-product figure settings: [min, max, use-log-scale]
    pnDefaults = {
        'lmchl': [0, 5, False],
        'lmtsm': [0, 3, False],
        'lmdoc': [0, 2, False],
        'lmmse': [1e-8, 1e-5, True]}

    # min/max retrieval bounds for chl, tsm, doc (mse is figure-only)
    borMinMax = [[pnDefaults['lmchl'][0], pnDefaults['lmchl'][1]],
                 [pnDefaults['lmtsm'][0], pnDefaults['lmtsm'][1]],
                 [pnDefaults['lmdoc'][0], pnDefaults['lmdoc'][1]]]

    dtsDomain = Domain(opts['srs'], opts['ext'])

    fileName = self.get_metadata('name')
    oBaseFileName = self.get_metadata('name').strip('"').strip("'")
    ncName = opts['oDir'] + oBaseFileName + '.nc'
    print ncName
    prodFileNames = {}
    for pn in opts['prods']:
        prodFileNames[pn] = '%s/%s.%s.png' % (opts['oDir'], oBaseFileName, pn)

    # Skip scenes that were already processed
    if os.path.exists(ncName):
        print '%s already exist!' % ncName
    else:
        # good bits for NRT
        #self.add_mask(cloudBits=[1, 4, 5, 6, 9, 10, 13, 15, 20, 21, 23, 28, 29, 30])
        try:
            self.reproject(dtsDomain)
        except:
            # NOTE(review): bare except hides the failure reason; narrowing
            # to the expected exception type would aid debugging
            print 'Cannot reproject %s. Skipping' % fileName
            return 1
        else:
            Rrsw_412 = self['Rrsw_412']
            if Rrsw_412 is None:
                return 1
            # process input with BOREALI
            b = Boreali(model='northsea', zone='northsea')
            cImg = b.process_lm(self, wavelen=[412, 443, 488, 531, 555, 667],
                                start=opts['start'], minmax=borMinMax)

            # generate Nansat with results
            img2 = Nansat(domain=self)
            for i, pn in enumerate(opts['prods']):
                img2.add_band(array=cImg[i, :, :], parameters={'name': pn})
            img2.add_band(array=self['mask'], parameters={'name': 'mask'})

            # export results into NC-file
            img2.export(ncName)

            # write images with concentrations
            for pn in opts['prods']:
                pnd = pnDefaults[pn]
                img2.write_figure(prodFileNames[pn], pn,
                                  clim=[pnd[0], pnd[1]],
                                  legend=True, logarithm=pnd[2])
    return 0
def test_export_add_geoloc(self, mock_add_geolocation):
    """add_geolocation=True must trigger the add_geolocation call."""
    nobj = Nansat(self.test_file_arctic, mapper=self.default_mapper)
    nobj.export(self.tmp_filename, add_geolocation=True)
    self.assertTrue(mock_add_geolocation.called)
# Demo script fragment: reprojection variants, export, object creation.
# Reproject n without a target domain, then map n2 onto n's grid
# (presumably the no-arg call resets/undoes a prior reprojection -- TODO
# confirm against Nansat.reproject docs)
n.reproject()
n2.reproject(n)
n2.write_figure(fileName=oFileName + '_proj_2onto1.png',
                bands=[1,2,3], clim='hist')

# Reproject onto grids of lat/lon
dFromGrids = Domain(lon=lonGrid, lat=latGrid)
n2.reproject(dFromGrids)
n2.write_figure(fileName=oFileName + '_proj_on_grid.png',
                bands=[1,2,3], clim='hist')

# reproject onto automatically generated domain
dstDomainAuto = Domain(srs="+proj=latlong +datum=WGS84 +ellps=WGS84 +no_defs",
                       ds=n.raw.dataset)
n.reproject(dstDomainAuto)
n.write_figure(fileName=oFileName + '_proj_1auto.png',
               bands=[1,2,3], clim='hist')

# export all data into NetCDF format
n.export(oFileName + '_0.nc')

# export one band to GeoTIFF
n.export_band(oFileName + '_2.tif', bandID=2, driver='GTiff')

# create new object from given domain and array
# 1. Reproject the current object
# 2. Get array with data
# 2. Create new Nansat object from the given array and for given domain
n.reproject(dStereo)
array = n[1]
nStereo = Nansat(domain=dStereo, array=array, parameters={'name': 'band1'})
print 'Stereo Nansat:', nStereo

# add band from array to existing object
# 0. Cancel reprojection. Adding bands works only on non-reprojected data
def test_export_function_with_ds_from_setup(self):
    """export() must succeed on the pre-built dataset and return None."""
    n = Nansat(self.tmp_ncfile)
    res = n.export(self.filename_exported)
    # assertIsNone gives a clearer failure message than assertEqual(res, None)
    self.assertIsNone(res)
vInd = np.argwhere(S == np.max(S)).flatten()[-1] else: vInd = np.argwhere(C == np.max(C)).flatten()[-1] S = S[vInd] F = F[vInd] if S == 99 and F == 8: S = 99 + 8 # fast ice if S == 98 and F == 10: S = 98 + 10 # iceberg CT = ft.GetFieldAsInteger('CT') if CT == 1: S = 1 # open water else: continue ft.SetField('classID', int(S)) oLayer.SetFeature(ft) fp_GAMMA0 = gdal.Open(ifile) GAMMA0 = fp_GAMMA0.ReadAsArray() fp_classID = gdal.Open(ifile) gdal.RasterizeLayer(fp_classID, [1], oLayer, options=["ATTRIBUTE=classID"]) classID = fp_classID.ReadAsArray() classID[classID == GAMMA0] = 99 classID[np.isnan(classID)] = 99 nansatObjGamma0 = Nansat(ifile) nansatObjIceChart = Nansat(array=classID, domain=nansatObjGamma0) nansatObjIceChart.export(ofile, bands=[1], driver='GTiff') rgb = np.zeros((classID.shape[0], classID.shape[1], 3), 'uint8') for k in colorDict.keys(): rgb[classID == k, :] = colorDict[k] plt.imsave(ofile.replace('.tif', '.png'), rgb)
def test_export_gtiff(self):
    """export(driver='GTiff') must create the target file on disk."""
    src = Nansat(self.test_file_gcps, logLevel=40)
    out_path = os.path.join(ntd.tmp_data_path, 'nansat_export.tif')
    src.export(out_path, driver='GTiff')
    self.assertTrue(os.path.exists(out_path))