def make_reproject(path, final_path, file_name, show='off'):
    """Reproject an image onto a fixed lat/lon Domain and append a mask.

    After reprojection a mask band is generated and added to the image,
    then the result is exported to netCDF.

    :param path: directory that contains the source file
    :param final_path: directory where the reprojected file is written
    :param file_name: name of the (source) file
    :param show: 'on' to plot band [2]; 'off' (default) to skip plotting
    :return: None; writes <final_path><file_name>.reproject.nc
    """
    # BUGFIX: the original used a Python-2-only print statement;
    # the parenthesized form works under both Python 2 and 3.
    print(path + file_name)
    nansat_obj = Nansat(path + file_name)
    # Small target area (kept for reference):
    # dom = Domain('+proj=latlong +datum=WGS84 +ellps=WGS84 +no_defs',
    #              '-lle -86.20 45.10 -86.10 45.20 -ts 300 300')
    # Whole region
    dom = Domain('+proj=latlong +datum=WGS84 +ellps=WGS84 +no_defs',
                 '-lle -86.3 44.6 -85.2 45.3 -ts 300 200')
    nansat_obj.reproject(dom)
    nansat_obj = create_mask(nansat_obj)
    if show == 'on':
        plt.imshow(nansat_obj[2])
        plt.colorbar()
        plt.show()
    nansat_obj.export(final_path + file_name + '.reproject.nc')
def test_export_gcps_complex_to_netcdf(self):
    """ Should export file with GCPs and write correct complex bands"""
    n0 = Nansat(self.test_file_gcps, log_level=40, mapper=self.default_mapper)
    b0 = n0['L_469']

    n1 = Nansat.from_domain(n0)
    n1.add_band(b0.astype('complex64'), parameters={'name': 'L_469'})
    tmpfilename = os.path.join(self.tmp_data_path,
                               'nansat_export_gcps_complex.nc')
    n1.export(tmpfilename)

    ncf = Dataset(tmpfilename)
    self.assertTrue(os.path.exists(tmpfilename))
    self.assertTrue('GCPX' in ncf.variables)
    self.assertTrue('GCPY' in ncf.variables)
    self.assertTrue('GCPPixel' in ncf.variables)
    self.assertTrue('GCPLine' in ncf.variables)

    # Re-open the exported file and verify the round trip
    n2 = Nansat(tmpfilename, mapper=self.default_mapper)
    b2 = n2['L_469']
    # the exported band must still be complex (b2 was unused before)
    self.assertTrue(np.iscomplexobj(b2))
    lon0, lat0 = n0.get_geolocation_grids()
    # BUGFIX: geolocation must be read from the re-imported n2, not
    # from n1 -- comparing n0 with n1 never tested the exported file.
    lon2, lat2 = n2.get_geolocation_grids()
    np.testing.assert_allclose(lon0, lon2)
    np.testing.assert_allclose(lat0, lat2)
def test_get_auto_ticks_vector(self):
    """_get_auto_ticks shall keep only the ticks that fall inside the grid."""
    nansat_obj = Nansat(self.test_file_gcps)
    lon_grid, lat_grid = nansat_obj.get_geolocation_grids()
    fig = Figure(lon_grid)
    ticks = fig._get_auto_ticks([28, 29, 30, 100], lon_grid)
    self.assertEqual(len(ticks), 3)
def test_write_figure_clim(self):
    """write_figure with clim='hist' shall produce an output PNG."""
    n1 = Nansat(self.test_file_stere, logLevel=40)
    # BUGFIX: the original reused 'nansat_write_figure_legend.png',
    # the same filename as the legend test -- the existence check could
    # pass because the *other* test had created the file.
    tmpfilename = os.path.join(ntd.tmp_data_path,
                               'nansat_write_figure_clim.png')
    n1.write_figure(tmpfilename, 3, clim='hist', legend=True)
    self.assertTrue(os.path.exists(tmpfilename))
def test_export_netcdf_arctic_hardcopy(self):
    """Export with hardcopy=True shall preserve the band data."""
    src = Nansat(self.test_file_arctic, mapper=self.default_mapper)
    src.export(self.tmp_filename, hardcopy=True)
    exported = Nansat(self.tmp_filename, mapper=self.default_mapper)
    # compare each of the three bands of original vs exported
    for band_num in (1, 2, 3):
        self.assertTrue((src[band_num] == exported[band_num]).any())
def test_write_figure_band(self):
    """write_figure shall create a PNG from a single band."""
    nansat_obj = Nansat(self.test_file_stere, logLevel=40)
    out_png = os.path.join(ntd.tmp_data_path, 'nansat_write_figure_band.png')
    nansat_obj.write_figure(out_png, 2)
    self.assertTrue(os.path.exists(out_png))
def test_get_GDALRasterBand(self):
    """get_GDALRasterBand shall return a readable gdal.Band."""
    nansat_obj = Nansat(self.test_file_gcps, logLevel=40)
    band = nansat_obj.get_GDALRasterBand(1)
    band_data = band.ReadAsArray()
    self.assertEqual(type(band), gdal.Band)
    self.assertEqual(type(band_data), np.ndarray)
def test_crop_no_gcps_arctic(self):
    """crop on a GCP-less dataset shall resize it and report the extent."""
    nansat_obj = Nansat(self.test_file_arctic, logLevel=40)
    extent = nansat_obj.crop(10, 20, 50, 60)
    self.assertEqual(nansat_obj.shape(), (60, 50))
    self.assertEqual(extent, (10, 20, 50, 60))
    self.assertEqual(type(nansat_obj[1]), np.ndarray)
def test_write_geotiffimage(self):
    """write_geotiffimage shall create a GeoTIFF file on disk."""
    nansat_obj = Nansat(self.test_file_stere, logLevel=40)
    out_tif = os.path.join(ntd.tmp_data_path, 'nansat_write_geotiffimage.tif')
    nansat_obj.write_geotiffimage(out_tif)
    self.assertTrue(os.path.exists(out_tif))
def test_crop_lonlat(self):
    """crop_lonlat shall crop the image to a lon/lat bounding box."""
    nansat_obj = Nansat(self.test_file_gcps, logLevel=40)
    extent = nansat_obj.crop_lonlat([28, 29], [70.5, 71])
    self.assertEqual(nansat_obj.shape(), (111, 110))
    self.assertEqual(extent, (31, 89, 110, 111))
    self.assertEqual(type(nansat_obj[1]), np.ndarray)
def test_get_no_transect_interactive(self):
    """get_transect without points shall return None in interactive mode."""
    import matplotlib.pyplot as plt
    plt.ion()
    nansat_obj = Nansat(self.test_file_gcps, logLevel=40)
    result = nansat_obj.get_transect()
    self.assertEqual(result, None)
    plt.ioff()
def test_dont_export2thredds_gcps(self):
    """export2thredds shall refuse datasets with GCPs (raises ValueError)."""
    src = Nansat(self.test_file_gcps, log_level=40, mapper=self.default_mapper)
    gcp_copy = Nansat.from_domain(src)
    src.add_band(np.ones(gcp_copy.shape(), np.float32))
    out_nc = os.path.join(self.tmp_data_path, 'nansat_export2thredds.nc')
    self.assertRaises(ValueError, gcp_copy.export2thredds, out_nc, ['L_645'])
def test_dont_export2thredds_gcps(self):
    """export2thredds shall refuse datasets with GCPs (old API variant)."""
    src = Nansat(self.test_file_gcps, logLevel=40)
    gcp_copy = Nansat(domain=src)
    src.add_band(np.ones(gcp_copy.shape(), np.float32))
    out_nc = os.path.join(ntd.tmp_data_path, 'nansat_export2thredds.nc')
    self.assertRaises(OptionError, gcp_copy.export2thredds, out_nc, ['L_645'])
def test_get_item_inf_expressions(self):
    """inf values produced by a band expression shall be replaced with nan."""
    dom = Domain(4326, "-te 25 70 35 72 -ts 500 500")
    nansat_obj = Nansat(domain=dom, logLevel=40)
    data = np.empty((500, 500))
    nansat_obj.add_band(data,
                        {'expression': 'np.array([0,1,2,3,np.inf,5,6,7])'})
    self.assertIsInstance(nansat_obj[1], np.ndarray)
    self.assertTrue(np.isnan(nansat_obj[1][4]))
def test_digitize_points(self):
    """digitize_points shall return an empty sequence when non-interactive."""
    plt.ion()
    nansat_obj = Nansat(self.test_file_gcps, logLevel=40)
    digitized = nansat_obj.digitize_points(1)
    self.assertEqual(len(digitized), 0)
    plt.ioff()
def test_bands(self):
    """bands() shall return a dict of band metadata keyed by band number."""
    nansat_obj = Nansat(self.test_file_gcps, logLevel=40)
    band_info = nansat_obj.bands()
    self.assertEqual(type(band_info), dict)
    self.assertTrue(1 in band_info)
    self.assertTrue('name' in band_info[1])
    self.assertEqual(band_info[1]['name'], 'L_645')
def test_special_characters_in_exported_metadata(self):
    """JSON metadata with quotes shall survive an export/import round trip."""
    orig = Nansat(self.test_file_gcps, mapper=self.default_mapper)
    meta_json = json.dumps({'meta1': 'hei', 'meta2': 'derr'})
    orig.vrt.dataset.SetMetadataItem('jsonstring', meta_json)
    orig.export(self.tmp_filename)
    copy = Nansat(self.tmp_filename, mapper=self.default_mapper)
    # unescape restores quote characters mangled by the netCDF export
    raw = unescape(copy.get_metadata('jsonstring'), {'&quot;': '"'})
    parsed = json.loads(raw)
    self.assertIsInstance(parsed, dict)
def test_export_band_by_name(self):
    """export shall accept band names in the bands argument."""
    nansat_obj = Nansat(self.test_file_gcps, log_level=40,
                        mapper=self.default_mapper)
    out_tif = os.path.join(self.tmp_data_path, 'nansat_export_band.tif')
    nansat_obj.export(out_tif, bands=['L_645'], driver='GTiff')
    nansat_obj = Nansat(out_tif, mapper=self.default_mapper)
    self.assertTrue(os.path.exists(out_tif))
    self.assertEqual(nansat_obj.vrt.dataset.RasterCount, 1)
def test_export_band(self):
    """export with bands=[1] shall write a single-band GTiff (old API)."""
    nansat_obj = Nansat(self.test_file_gcps, logLevel=40)
    out_tif = os.path.join(ntd.tmp_data_path, 'nansat_export_band.tif')
    nansat_obj.export(out_tif, bands=[1], driver='GTiff')
    nansat_obj = Nansat(out_tif, mapperName='generic')
    self.assertTrue(os.path.exists(out_tif))
    self.assertEqual(nansat_obj.vrt.dataset.RasterCount, 1)
def write_geotiff(self, filename, landmask=True, icemask=True):
    """Write the masked windspeed to a GeoTIFF file.

    :param filename: output GeoTIFF path
    :param landmask: apply the land mask before writing
    :param icemask: apply the ice mask before writing
    """
    sar_windspeed, palette = self._get_masked_windspeed(landmask, icemask)
    band_params = {'name': 'masked_windspeed', 'minmax': '0 20'}
    out_obj = Nansat(array=sar_windspeed, domain=self, parameters=band_params)
    out_obj.write_geotiffimage(filename)
def test_get_time_coverage_start_end(self):
    """time_coverage_start/end metadata shall be parsed into datetimes."""
    nansat_obj = Nansat(self.test_file_gcps, logLevel=40)
    nansat_obj.set_metadata('time_coverage_start', '2016-01-20')
    nansat_obj.set_metadata('time_coverage_end', '2016-01-21')
    self.assertEqual(type(nansat_obj.time_coverage_start), datetime.datetime)
    self.assertEqual(type(nansat_obj.time_coverage_end), datetime.datetime)
def test_get_auto_ticks_number(self):
    """_get_auto_ticks shall produce the requested number of lon ticks."""
    nansat_obj = Nansat(self.test_file_gcps, mapper=self.default_mapper)
    lon, lat = nansat_obj.get_geolocation_grids()
    fig = Figure(lon)
    lon_ticks = fig._get_auto_ticks(5, lon)
    lat_ticks = fig._get_auto_ticks(5, lat)
    self.assertEqual(len(lon_ticks), 5)
    # log the tick values for manual inspection
    nansat_obj.logger.error(str(lon_ticks))
    nansat_obj.logger.error(str(lat_ticks))
def test_watermask(self):
    """Fetch a watermask array when MOD44W data is available.

    NOTE(review): the "else: should raise an error" path mentioned in the
    original docstring is not exercised here -- confirm whether it should be.
    """
    nansat_obj = Nansat(self.test_file_gcps, logLevel=40)
    mod44path = os.getenv('MOD44WPATH')
    if mod44path is not None and os.path.exists(mod44path + '/MOD44W.vrt'):
        wm = nansat_obj.watermask()[1]
        self.assertEqual(type(wm), np.ndarray)
        self.assertEqual(wm.shape[0], nansat_obj.shape()[0])
        self.assertEqual(wm.shape[1], nansat_obj.shape()[1])
def test_get_transect_wrong_band(self):
    """get_transect with a non-existing band shall still return coordinates."""
    nansat_obj = Nansat(self.test_file_gcps, logLevel=40)
    transect = nansat_obj.get_transect([[0, 28.31299128], [0, 70.93709219]],
                                       [10])
    for field in ('line', 'pixel', 'lat', 'lon'):
        self.assertTrue(field in transect.dtype.fields)
    self.assertEqual(type(transect['lat']), np.ndarray)
    self.assertEqual(type(transect['lon']), np.ndarray)
def test_reproject_no_addmask(self):
    """With addmask=False no swath mask is added; out-of-swath stays finite."""
    nansat_obj = Nansat(self.test_file_complex, logLevel=40)
    target = Domain(4326, '-te -92.08 26.85 -92.00 26.91 -ts 200 200')
    nansat_obj.reproject(target, addmask=False)
    band = nansat_obj[1]
    self.assertTrue(not nansat_obj.has_band('swathmask'))
    # both a corner (outside swath) and the centre stay finite
    self.assertTrue(np.isfinite(band[0, 0]))
    self.assertTrue(np.isfinite(band[100, 100]))
def test_get_transect_wrong_band(self):
    """get_transect with a non-existing band shall still return coordinates."""
    nansat_obj = Nansat(self.test_file_gcps, log_level=40,
                        mapper=self.default_mapper)
    transect = nansat_obj.get_transect([[0, 28.31299128], [0, 70.93709219]],
                                       [10])
    for field in ('line', 'pixel', 'lat', 'lon'):
        self.assertTrue(field in transect.dtype.fields)
    self.assertEqual(type(transect['lat']), np.ndarray)
    self.assertEqual(type(transect['lon']), np.ndarray)
def test_reproject_gcps(self):
    """Reproject a stereographic image onto a GCP-based target image."""
    src = Nansat(self.test_file_stere, log_level=40, mapper=self.default_mapper)
    target = Nansat(self.test_file_gcps, log_level=40,
                    mapper=self.default_mapper)
    src.reproject(target)
    out_png = os.path.join(self.tmp_data_path, 'nansat_reproject_gcps.png')
    src.write_figure(out_png, 2, clim='hist')
    self.assertEqual(src.shape(), target.shape())
    self.assertEqual(type(src[1]), np.ndarray)
def test_reproject_no_addmask(self):
    """With addmask=False no swath mask is added; out-of-swath stays finite."""
    nansat_obj = Nansat(self.test_file_gcps, log_level=40,
                        mapper=self.default_mapper)
    target = Domain(4326, '-te -92.08 26.85 -92.00 26.91 -ts 200 200')
    nansat_obj.reproject(target, addmask=False)
    band = nansat_obj[1]
    self.assertTrue(not nansat_obj.has_band('swathmask'))
    self.assertTrue(np.isfinite(band[0, 0]))
    self.assertTrue(np.isfinite(band[100, 100]))
def test_watermask(self):
    """Fetch a watermask array when MOD44W data is available.

    NOTE(review): the "should raise an error" branch from the original
    docstring is not exercised here -- confirm whether it should be.
    """
    nansat_obj = Nansat(self.test_file_gcps, log_level=40,
                        mapper=self.default_mapper)
    mod44path = os.getenv('MOD44WPATH')
    if mod44path is not None and os.path.exists(mod44path + '/MOD44W.vrt'):
        wm = nansat_obj.watermask()[1]
        self.assertEqual(type(wm), np.ndarray)
        self.assertEqual(wm.shape[0], nansat_obj.shape()[0])
        self.assertEqual(wm.shape[1], nansat_obj.shape()[1])
def test_reproject_of_complex(self):
    """Reprojection of complex data shall put np.nan outside the swath."""
    nansat_obj = Nansat(self.test_file_complex, log_level=40,
                        mapper=self.default_mapper)
    target = Domain(4326, '-te -92.08 26.85 -92.00 26.91 -ts 200 200')
    nansat_obj.reproject(target)
    band = nansat_obj[1]
    self.assertTrue(nansat_obj.has_band('swathmask'))
    self.assertTrue(np.isnan(band[0, 0]))       # corner: out of swath
    self.assertTrue(np.isfinite(band[100, 100]))  # centre: inside swath
def test_get_tick_index_from_grid(self):
    """_get_tick_index_from_grid shall locate pixels closest to tick values."""
    nansat_obj = Nansat(self.test_file_gcps, mapper=self.default_mapper)
    lon, lat = nansat_obj.get_geolocation_grids()
    fig = Figure(lon)
    lon_idx = fig._get_tick_index_from_grid([28.5, 29], lon, 1, lon.shape[1])
    lat_idx = fig._get_tick_index_from_grid([71, 71.5], lat, lat.shape[0], 1)
    # log the indices for manual inspection
    nansat_obj.logger.error(str(lon_idx))
    nansat_obj.logger.error(str(lat_idx))
def test_get_transect_false(self):
    """get_transect (old API) shall return values, coordinates and pixels."""
    nansat_obj = Nansat(self.test_file_gcps, logLevel=40)
    points = [(28.31299128, 70.93709219), (28.93691525, 70.69646524)]
    v, xy, pl = nansat_obj.get_transect(points)
    self.assertEqual(len(v['1:L_645']), 2)
    self.assertEqual(len(v['1:L_645']), len(xy))
    self.assertEqual(len(v['1:L_645']), len(pl))
    self.assertEqual(type(xy['shape0']['latitude']), np.ndarray)
    self.assertEqual(type(pl['shape0'][0]), np.ndarray)
def test_export_netcdf_arctic(self):
    """Export of the arctic data without GCPs shall preserve bands."""
    src = Nansat(self.test_file_arctic)
    src.export(self.tmpfilename)
    exported = Nansat(self.tmpfilename)
    for band_num in (1, 2, 3):
        self.assertTrue((src[band_num] == exported[band_num]).any())
    os.unlink(self.tmpfilename)
def test_get_tick_index_from_grid(self):
    """_get_tick_index_from_grid shall locate pixels closest to tick values."""
    nansat_obj = Nansat(self.test_file_gcps)
    lon, lat = nansat_obj.get_geolocation_grids()
    fig = Figure(lon)
    lon_idx = fig._get_tick_index_from_grid([28.5, 29], lon, 1, lon.shape[1])
    lat_idx = fig._get_tick_index_from_grid([71, 71.5], lat, lat.shape[0], 1)
    # log the indices for manual inspection
    nansat_obj.logger.error(str(lon_idx))
    nansat_obj.logger.error(str(lat_idx))
def test_get_auto_ticks_number(self):
    """_get_auto_ticks shall produce the requested number of lon ticks."""
    nansat_obj = Nansat(self.test_file_gcps)
    lon, lat = nansat_obj.get_geolocation_grids()
    fig = Figure(lon)
    lon_ticks = fig._get_auto_ticks(5, lon)
    lat_ticks = fig._get_auto_ticks(5, lat)
    self.assertEqual(len(lon_ticks), 5)
    # log the tick values for manual inspection
    nansat_obj.logger.error(str(lon_ticks))
    nansat_obj.logger.error(str(lat_ticks))
def test_export2thredds_stere_one_band(self):
    """export2thredds shall write one band to netCDF (skipped under conda)."""
    # skip the test if anaconda is used
    if IS_CONDA:
        return
    nansat_obj = Nansat(self.test_file_stere, logLevel=40)
    out_nc = os.path.join(ntd.tmp_data_path, 'nansat_export2thredds_1b.nc')
    nansat_obj.export2thredds(out_nc, ['L_469'])
    self.assertTrue(os.path.exists(out_nc))
def test_add_band(self):
    """add_band shall add a named ndarray band to a domain-based Nansat."""
    dom = Domain(4326, "-te 25 70 35 72 -ts 500 500")
    nansat_obj = Nansat(domain=dom, logLevel=40)
    data = np.random.randn(500, 500)
    nansat_obj.add_band(data, {'name': 'band1'})
    self.assertEqual(type(nansat_obj), Nansat)
    self.assertEqual(type(nansat_obj[1]), np.ndarray)
    self.assertEqual(nansat_obj.get_metadata('name', 1), 'band1')
    self.assertEqual(nansat_obj[1].shape, (500, 500))
def update_icemap_mosaic(inp_filename, inp_data, out_filename, out_domain, out_metadata):
    """Merge one classified ice map into a running mosaic GeoTIFF.

    :param inp_filename: input classification file readable by Nansat
    :param inp_data: dict with 'arr' and 'mask' arrays already on the mosaic
        grid, or None to read and reproject them from inp_filename
    :param out_filename: mosaic GeoTIFF path; reused as the base if it exists
    :param out_domain: Domain defining the mosaic grid
    :param out_metadata: metadata dict written onto the output
    :return: the inp_data dict (reusable by subsequent calls)
    """
    # start from the existing mosaic, or from an all-255 (no-data) array
    if os.path.exists(out_filename):
        mos_array = Nansat(out_filename)[1]
    else:
        mos_array = np.zeros(out_domain.shape(), np.uint8) + 255
    # read classification data and reproject onto mosaic domain
    n = Nansat(inp_filename)
    if inp_data is None:
        n.reproject_gcps()
        n.reproject(out_domain)
        inp_data = dict(arr=n[1], mask=n[2])
    # put data into mosaic array
    # only valid (mask == 1) classified (< 255) pixels overwrite the mosaic
    gpi = (inp_data['mask'] == 1) * (inp_data['arr'] < 255)
    mos_array[gpi] = inp_data['arr'][gpi]
    # export
    n_out = Nansat.from_domain(out_domain)
    n_out.add_band(array=mos_array, parameters={'name': 'classification'})
    # input metadata first, then out_metadata so it can override
    n_out.set_metadata(n.get_metadata())
    n_out.set_metadata(out_metadata)
    n_out = add_colortable(n_out)
    n_out.export(out_filename, driver='GTiff', options=['COMPRESS=LZW'])
    return inp_data
def get_or_create(self, uri, force):
    """Get or ingest a Dataset for *uri*.

    :param uri: URI of a file or stream openable with Nansat
    :param force: if True, delete the existing dataset and re-ingest
    :return: (Dataset, created) tuple
    """
    # Validate uri - this should raise an exception if the uri doesn't
    # point to a valid file or stream
    validate_uri(uri)

    # Several datasets can refer to the same uri (e.g., scatterometers and svp drifters), so we
    # need to pass uri_filter_args
    uris = DatasetURI.objects.filter(uri=uri)

    # If the ingested uri is already in the database and not <force> ingestion then stop
    if uris.exists() and not force:
        return uris[0].dataset, False
    elif uris.exists() and force:
        uris[0].dataset.delete()

    # Open file with Nansat
    n = Nansat(nansat_filename(uri))

    # get metadata from Nansat and get objects from vocabularies
    n_metadata = n.get_metadata()

    # set compulsory metadata (source)
    platform, _ = Platform.objects.get_or_create(
        json.loads(n_metadata['platform']))
    instrument, _ = Instrument.objects.get_or_create(
        json.loads(n_metadata['instrument']))
    specs = n_metadata.get('specs', '')
    source, _ = Source.objects.get_or_create(platform=platform,
                                             instrument=instrument,
                                             specs=specs)

    footprint = Polygon(list(zip(*n.get_border())))
    geolocation = GeographicLocation.objects.get_or_create(
        geometry=footprint)[0]
    data_center = DataCenter.objects.get_or_create(
        json.loads(n_metadata['Data Center']))[0]
    iso_category = ISOTopicCategory.objects.get_or_create(
        pti.get_iso19115_topic_category('Oceans'))[0]
    location = Location.objects.get_or_create(
        json.loads(n_metadata['gcmd_location']))[0]

    # create dataset
    # BUGFIX: entry_id was passed as a lambda, which Django would store
    # verbatim as the field value; it must be the computed string.
    # NOTE(review): a fresh uuid in the lookup kwargs defeats the "get"
    # half of get_or_create -- consider moving entry_id into defaults=.
    ds, created = Dataset.objects.get_or_create(
        time_coverage_start=make_aware(n.time_coverage_start),
        time_coverage_end=make_aware(
            n.time_coverage_start + timedelta(hours=23, minutes=59,
                                              seconds=59)),
        source=source,
        geographic_location=geolocation,
        ISO_topic_category=iso_category,
        data_center=data_center,
        summary='',
        gcmd_location=location,
        access_constraints='',
        entry_id='NERSC_' + str(uuid.uuid4()))

    ds_uri, _ = DatasetURI.objects.get_or_create(
        name=FILE_SERVICE_NAME,
        service=LOCAL_FILE_SERVICE,
        uri=uri,
        dataset=ds)

    return ds, created
def test_init_domain_array(self):
    """Nansat shall be constructable from a domain plus an array band."""
    dom = Domain(4326, "-te 25 70 35 72 -ts 500 500")
    nansat_obj = Nansat(domain=dom,
                        array=np.random.randn(500, 500),
                        parameters={'name': 'band1'},
                        logLevel=40)
    self.assertEqual(type(nansat_obj), Nansat)
    self.assertEqual(type(nansat_obj[1]), np.ndarray)
    self.assertEqual(nansat_obj.get_metadata('name', 1), 'band1')
    self.assertEqual(nansat_obj[1].shape, (500, 500))
def test_crop_gcpproj(self):
    """crop after reproject_gcps shall keep the stereographic GCP projection."""
    nansat_obj = Nansat(self.test_file_gcps, log_level=40,
                        mapper=self.default_mapper)
    nansat_obj.reproject_gcps()
    nansat_obj.crop(10, 20, 50, 60)
    gcp_x = np.array([gcp.GCPX
                      for gcp in nansat_obj.vrt.dataset.GetGCPs()])
    xmed = abs(np.median(gcp_x))
    proj4 = NSR(nansat_obj.vrt.dataset.GetGCPProjection()).ExportToProj4()
    gcpproj = proj4.split(' ')[0]
    # GCP x-values are in projected metres (> 360), not degrees
    self.assertTrue(xmed > 360)
    self.assertTrue(gcpproj == '+proj=stere')
def test_dont_export2thredds_gcps(self):
    """export2thredds shall raise ValueError with a clear message for GCPs."""
    src = Nansat(self.test_file_gcps, log_level=40, mapper=self.default_mapper)
    gcp_copy = Nansat.from_domain(src)
    src.add_band(np.ones(gcp_copy.shape(), np.float32))
    out_nc = os.path.join(self.tmp_data_path, 'nansat_export2thredds.nc')
    with self.assertRaises(ValueError) as raised:
        gcp_copy.export2thredds(out_nc)
    self.assertIn('Cannot export dataset with GCPS', raised.exception.args[0])
def test_resize_complex_alg_average(self):
    """Averaging resample of complex data shall warn about lost imaginary parts."""
    n = Nansat(self.test_file_complex, log_level=40, mapper=self.default_mapper)
    with warnings.catch_warnings(record=True) as w:
        # BUGFIX: without resetting the filter, the warning may have been
        # reported already elsewhere (the default filter shows a warning
        # once per location), leaving w empty and the test flaky.  The
        # sibling old-API test already does this.
        warnings.simplefilter("always")
        n.resize(0.5, resample_alg=-1)
    self.assertEqual(len(w), 1)
    self.assertTrue(issubclass(w[-1].category, UserWarning))
    self.assertIn(
        'The imaginary parts of complex numbers '
        'are lost when resampling by averaging ',
        str(w[-1].message))
def test_add_subvrts_only_to_one_nansat(self):
    """add_band shall register band VRTs only on the receiving object."""
    dom = Domain(4326, "-te 25 70 35 72 -ts 500 500")
    first = Nansat.from_domain(dom, log_level=40)
    second = Nansat.from_domain(dom, log_level=40)
    first.add_band(np.random.randn(500, 500), {'name': 'band1'})
    self.assertEqual(type(first.vrt.band_vrts), dict)
    self.assertTrue(len(first.vrt.band_vrts) > 0)
    # the sibling object must stay untouched
    self.assertEqual(second.vrt.band_vrts, {})
def test_get_transect_outside(self):
    """get_transect with out-of-image points shall still return all fields."""
    nansat_obj = Nansat(self.test_file_gcps, logLevel=40)
    transect = nansat_obj.get_transect([[0, 28.31299128], [0, 70.93709219]],
                                       [1])
    for field in ('L_645', 'line', 'pixel', 'lat', 'lon'):
        self.assertTrue(field in transect.dtype.fields)
    self.assertEqual(type(transect['lat']), np.ndarray)
    self.assertEqual(type(transect['lon']), np.ndarray)
def test_export_netcdf_complex_remove_meta(self):
    """export with rm_metadata shall drop the metadata item from the copy."""
    src = Nansat(self.test_file_complex, mapper=self.default_mapper)
    self.assertEqual(src.get_metadata('PRODUCT_TYPE'), 'SLC')
    src.export(self.tmp_filename, rm_metadata=['PRODUCT_TYPE'])
    exported = Nansat(self.tmp_filename, mapper=self.default_mapper)
    with self.assertRaises(ValueError):
        exported.get_metadata('PRODUCT_TYPE')
    self.assertTrue((src[1] == exported[1]).any())
def test_resize_complex_algAverage(self):
    """Averaging resample of complex data shall emit a UserWarning (old API)."""
    nansat_obj = Nansat(self.test_file_complex, logLevel=40)
    with warnings.catch_warnings(record=True) as caught:
        warnings.simplefilter("always")
        nansat_obj.resize(0.5, eResampleAlg=-1)
    self.assertTrue(len(caught) == 1)
    self.assertTrue(issubclass(caught[-1].category, UserWarning))
    self.assertTrue('The imaginary parts of complex numbers '
                    'are lost when resampling by averaging '
                    in str(caught[-1].message))
def test_add_bands(self):
    """add_bands shall add several named bands at once."""
    dom = Domain(4326, "-te 25 70 35 72 -ts 500 500")
    nansat_obj = Nansat(domain=dom, logLevel=40)
    data = np.random.randn(500, 500)
    nansat_obj.add_bands([data, data],
                         [{'name': 'band1'}, {'name': 'band2'}])
    self.assertEqual(type(nansat_obj), Nansat)
    self.assertEqual(type(nansat_obj[1]), np.ndarray)
    self.assertEqual(type(nansat_obj[2]), np.ndarray)
    self.assertEqual(nansat_obj.get_metadata('name', 1), 'band1')
    self.assertEqual(nansat_obj.get_metadata('name', 2), 'band2')
def test_write_figure_logo(self):
    """write_figure shall accept a logo file and produce a PNG."""
    nansat_obj = Nansat(self.test_file_stere, log_level=40,
                        mapper=self.default_mapper)
    out_png = os.path.join(self.tmp_data_path, 'nansat_write_figure_logo.png')
    nansat_obj.write_figure(out_png, 3, clim='hist',
                            logoFileName=self.test_file_gcps)
    self.assertTrue(os.path.exists(out_png))
def test_get_transect_pixlin(self):
    """get_transect shall accept pixel/line coordinates when lonlat=False."""
    nansat_obj = Nansat(self.test_file_gcps, logLevel=40)
    transect = nansat_obj.get_transect([[10, 20], [10, 10]], ['L_645'],
                                       lonlat=False)
    for field in ('L_645', 'line', 'pixel', 'lat', 'lon'):
        self.assertTrue(field in transect.dtype.fields)
    self.assertEqual(type(transect['lat']), np.ndarray)
    self.assertEqual(type(transect['lon']), np.ndarray)
    # 11 points: both endpoints of the 10-pixel segment are included
    self.assertEqual(len(transect['lon']), 11)
def write_geotiff(self, filename, landmask=True, icemask=True):
    """Save the masked windspeed as a GeoTIFF image.

    :param filename: output GeoTIFF path
    :param landmask: apply the land mask before writing
    :param icemask: apply the ice mask before writing
    """
    sar_windspeed, palette = self._get_masked_windspeed(landmask, icemask)
    params = {'name': 'masked_windspeed', 'minmax': '0 20'}
    geotiff_obj = Nansat(array=sar_windspeed, domain=self, parameters=params)
    geotiff_obj.write_geotiffimage(filename)
def test_export2thredds_rmmetadata(self):
    """Deprecated rmMetadata argument shall trigger NansatFutureWarning."""
    nansat_obj = Nansat(self.test_file_arctic, mapper=self.default_mapper,
                        log_level=40)
    with warnings.catch_warnings(record=True) as recorded_warnings:
        nansat_obj.export2thredds(self.tmp_filename,
                                  {'Bristol': {'type': '>i2'}},
                                  time=datetime.datetime(2016, 1, 20),
                                  rmMetadata=['description'])
    self.assertEqual(recorded_warnings[0].category, NansatFutureWarning)
def test_get_transect_data(self):
    """get_transect with external data shall use it instead of a band."""
    nansat_obj = Nansat(self.test_file_gcps, log_level=40,
                        mapper=self.default_mapper)
    band_data = nansat_obj[1]
    transect = nansat_obj.get_transect([[28.3], [70.9]], [], data=band_data)
    self.assertTrue('input' in transect.dtype.fields)
    # the band name must NOT appear when external data is supplied
    self.assertTrue('L_645' not in transect.dtype.fields)
    for field in ('line', 'pixel', 'lat', 'lon'):
        self.assertTrue(field in transect.dtype.fields)
    self.assertEqual(type(transect['lat']), np.ndarray)
    self.assertEqual(type(transect['lon']), np.ndarray)
def open_with_nansat(self, filePath, mapper=None, kwargs=None):
    """Ensure that filePath can be opened as a Nansat object.

    :param filePath: path of the file to open
    :param mapper: optional mapper name forwarded to Nansat
    :param kwargs: optional extra keyword arguments for Nansat
    :raises Exception: wrapping the original error together with the path
    """
    if kwargs is None:
        kwargs = {}
    try:
        if mapper:
            n = Nansat(filePath, mapperName=mapper, **kwargs)
        else:
            n = Nansat(filePath, **kwargs)
    except Exception as e:
        # BUGFIX: e.message does not exist in Python 3; str(e) (implicit
        # via %s) works on both Python 2 and 3.
        raise Exception('%s: %s' % (filePath, e))
    # isinstance instead of exact type comparison (accepts subclasses)
    assert isinstance(n, Nansat)
def test_add_latlon_grids_auto(self):
    """Figure shall draw automatically spaced lon/lat gridlines."""
    out_png = os.path.join(ntd.tmp_data_path, 'figure_latlon_grids_auto.png')
    nansat_obj = Nansat(self.test_file_gcps)
    band = nansat_obj[1]
    lon, lat = nansat_obj.get_geolocation_grids()
    fig = Figure(band)
    fig.process(clim='hist', lonGrid=lon, latGrid=lat)
    fig.save(out_png)
    self.assertEqual(type(fig), Figure)
    self.assertTrue(os.path.exists(out_png))
def test_get_mask(self):
    """Mosaic.Layer shall get the mask from a reprojected file."""
    nansat_obj = Nansat(self.test_file_gcps)
    nansat_obj.reproject(self.domain)
    swathmask = nansat_obj['swathmask']
    layer = Layer(self.test_file_gcps)
    layer.make_nansat_object(self.domain)
    mask = layer.get_mask_array()
    self.assertEqual(type(mask), np.ndarray)
    self.assertEqual(mask.shape, (650, 700))
    # layer mask is the swath mask scaled by 64
    np.testing.assert_allclose(mask, swathmask * 64)
def test_write_figure_legend(self):
    """write_figure shall render a legend with a title string."""
    nansat_obj = Nansat(self.test_file_stere, log_level=40,
                        mapper=self.default_mapper)
    out_png = os.path.join(self.tmp_data_path,
                           'nansat_write_figure_legend.png')
    nansat_obj.write_figure(out_png, 3, clim='hist', legend=True,
                            titleString="Title String")
    self.assertTrue(os.path.exists(out_png))
def test_export2thredds_arctic_long_lat(self):
    """export2thredds shall write three bands and correct lon/lat bounds
    into the netCDF global attributes."""
    n = Nansat(self.test_file_arctic, mapper=self.default_mapper,
               log_level=40)
    tmpfilename = os.path.join(self.tmp_data_path,
                               'nansat_export2thredds_arctic.nc')
    # three big-endian int16 bands
    bands = {
        'Bristol': {'type': '>i2'},
        'Bootstrap': {'type': '>i2'},
        'UMass_AES': {'type': '>i2'},
    }
    n.export2thredds(tmpfilename, bands,
                     time=datetime.datetime(2016, 1, 20))
    self.assertTrue(os.path.exists(tmpfilename))
    g = gdal.Open(tmpfilename)
    metadata = g.GetMetadata_Dict()

    # GDAL behaves differently:
    # Windows: nc-attributes are accessible without 'NC_GLOBAL#' prefix
    # Linux: nc-attributes are accessible only with 'NC_GLOBAL#' prefix
    # OSX: ?
    # Therefore we have to add NC_GLOBAL# and test if such metadata exists
    nc_prefix = 'NC_GLOBAL#'
    if not nc_prefix + 'easternmost_longitude' in metadata:
        nc_prefix = ''
    self.assertIn(nc_prefix + 'easternmost_longitude', metadata)

    # Test that the long/lat values are set correctly
    test_metadata_keys = ['easternmost_longitude',
                          'westernmost_longitude',
                          'northernmost_latitude',
                          'southernmost_latitude']
    # acceptable [min, max] window for each attribute above
    test_metadata_min = [179, -180, 89.9, 53]
    test_metadata_max = [180, -179, 90, 54]
    for i, test_metadata_key in enumerate(test_metadata_keys):
        medata_value = float(metadata[nc_prefix + test_metadata_key])
        self.assertTrue(
            medata_value >= test_metadata_min[i],
            '%s is wrong: %f' % (test_metadata_key, medata_value))
        self.assertTrue(
            medata_value <= test_metadata_max[i],
            '%s is wrong: %f' % (test_metadata_key, medata_value))
def test_export_netcdf_complex_remove_meta(self):
    """Export of complex data shall honour rmMetadata (old API variant)."""
    src = Nansat(self.test_file_complex)
    self.assertEqual(src.get_metadata('PRODUCT_TYPE'), 'SLC')
    src.export(self.tmpfilename, rmMetadata=['PRODUCT_TYPE'])
    exported = Nansat(self.tmpfilename)
    with self.assertRaises(OptionError):
        exported.get_metadata('PRODUCT_TYPE')
    self.assertTrue((src[1] == exported[1]).any())
    os.unlink(self.tmpfilename)