def test_export_netcdf_complex_remove_meta(self):
    n = Nansat(self.test_file_complex, mapper=self.default_mapper)
    self.assertEqual(n.get_metadata('PRODUCT_TYPE'), 'SLC')
    n.export(self.tmp_filename, rm_metadata=['PRODUCT_TYPE'])
    exported = Nansat(self.tmp_filename, mapper=self.default_mapper)

    with self.assertRaises(ValueError):
        exported.get_metadata('PRODUCT_TYPE')
    self.assertTrue((n[1] == exported[1]).any())
def test_export_netcdf_complex_remove_meta(self):
    ''' Test export of complex data with pixelfunctions '''
    n = Nansat(self.test_file_complex)
    self.assertEqual(n.get_metadata('PRODUCT_TYPE'), 'SLC')
    n.export(self.tmpfilename, rmMetadata=['PRODUCT_TYPE'])
    exported = Nansat(self.tmpfilename)

    with self.assertRaises(OptionError):
        exported.get_metadata('PRODUCT_TYPE')
    self.assertTrue((n[1] == exported[1]).any())
    os.unlink(self.tmpfilename)
def test_time_coverage_metadata_of_exported_equals_original(self):
    orig = Nansat(self.test_file_gcps, mapper=self.default_mapper)
    orig.set_metadata('time_coverage_start', '2010-01-02T08:49:02.347809')
    orig.set_metadata('time_coverage_end', '2010-01-02T08:50:03.599373')
    orig.export(self.tmp_filename)
    copy = Nansat(self.tmp_filename, mapper=self.default_mapper)

    self.assertEqual(orig.get_metadata('time_coverage_start'),
                     copy.get_metadata('time_coverage_start'))
    self.assertEqual(orig.get_metadata('time_coverage_end'),
                     copy.get_metadata('time_coverage_end'))
def test_special_characters_in_exported_metadata(self):
    orig = Nansat(self.test_file_gcps, mapper=self.default_mapper)
    orig.vrt.dataset.SetMetadataItem('jsonstring', json.dumps({'meta1': 'hei',
                                                               'meta2': 'derr'}))
    orig.export(self.tmp_filename)
    copy = Nansat(self.tmp_filename, mapper=self.default_mapper)
    dd = json.loads(unescape(copy.get_metadata('jsonstring'), {'&quot;': '"'}))
    self.assertIsInstance(dd, dict)
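# A minimal sketch of why unescape() is needed in the test above, assuming the
# exporter stores double quotes of the JSON string as XML entities; the sample
# string here is hypothetical. unescape() handles &amp;/&lt;/&gt; by default,
# and the extra entity map restores valid, parseable JSON.
from xml.sax.saxutils import unescape

escaped = '{&quot;meta1&quot;: &quot;hei&quot;}'
print(unescape(escaped, {'&quot;': '"'}))  # -> {"meta1": "hei"}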
def test_add_band(self):
    d = Domain(4326, "-te 25 70 35 72 -ts 500 500")
    arr = np.random.randn(500, 500)
    n = Nansat(domain=d, logLevel=40)
    n.add_band(arr, {'name': 'band1'})

    self.assertEqual(type(n), Nansat)
    self.assertEqual(type(n[1]), np.ndarray)
    self.assertEqual(n.get_metadata('name', 1), 'band1')
    self.assertEqual(n[1].shape, (500, 500))
def test_init_domain_array(self):
    d = Domain(4326, "-te 25 70 35 72 -ts 500 500")
    n = Nansat(domain=d, array=np.random.randn(500, 500),
               parameters={'name': 'band1'}, logLevel=40)

    self.assertEqual(type(n), Nansat)
    self.assertEqual(type(n[1]), np.ndarray)
    self.assertEqual(n.get_metadata('name', 1), 'band1')
    self.assertEqual(n[1].shape, (500, 500))
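# A minimal sketch, assuming the newer Nansat API: the domain+array constructor
# exercised above is equivalent to the Nansat.from_domain() class method used
# elsewhere in this file (get_n, save_ice_map). Reuses Domain/np from the tests.
d = Domain(4326, "-te 25 70 35 72 -ts 500 500")
n = Nansat.from_domain(d, array=np.random.randn(500, 500),
                       parameters={'name': 'band1'})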
def test_get_metadata_bandid(self):
    n1 = Nansat(self.test_file_stere, logLevel=40)
    m = n1.get_metadata(bandID=1)

    self.assertEqual(type(m), dict)
    self.assertTrue('name' in m)
def test_set_metadata_bandid(self):
    n1 = Nansat(self.test_file_stere, logLevel=40)
    n1.set_metadata('newKey', 'newVal', 1)
    m = n1.get_metadata('newKey', 1)

    self.assertEqual(m, 'newVal')
def get_n(filename, bandName='sigma0_HV', factor=0.5, denoise=False, dB=True,
          mask_invalid=True, landmask_border=20, correct_hh=False,
          correct_hh_factor=-0.27, remove_spatial_mean=False,
          vmin=None, vmax=None, pmin=10, pmax=99, **kwargs):
    """ Get Nansat object with image data scaled to UInt8

    Parameters
    ----------
    filename : str
        input file name
    bandName : str
        name of band in the file
    factor : float
        subsampling factor
    denoise : bool
        apply denoising of sigma0?
    dB : bool
        apply conversion to dB?
    mask_invalid : bool
        mask invalid pixels (land, inf, etc) with 0?
    landmask_border : int
        border around landmask
    correct_hh : bool
        perform angular correction of sigma0_HH?
    correct_hh_factor : float
        coefficient in the correction factor
        sigma0_HH_cor = sigma0_HH + correct_hh_factor * incidence_angle
    remove_spatial_mean : bool
        remove spatial mean from image?
    vmin : float or None
        minimum value to convert to 1
    vmax : float or None
        maximum value to convert to 255
    pmin : float
        lower percentile for data scaling if vmin is None
    pmax : float
        upper percentile for data scaling if vmax is None
    **kwargs : dict
        dummy parameters for get_denoised_object()

    Returns
    -------
    n : Nansat
        object with one band scaled to UInt8
    """
    if denoise:
        # run denoising
        n = get_denoised_object(filename, bandName, factor, **kwargs)
    else:
        # open data with Nansat and downsample
        n = Nansat(filename)
        if factor != 1:
            n.resize(factor, resample_alg=-1)
    # get matrix with data
    img = n[bandName]
    # convert to dB
    if not denoise and dB:
        img[img <= 0] = np.nan
        img = 10 * np.log10(img)
    if correct_hh:
        img = hh_angular_correction(n, img, bandName, correct_hh_factor)
    if mask_invalid:
        mask = get_invalid_mask(img, n, landmask_border)
        img[mask] = np.nan
    if remove_spatial_mean:
        img -= get_spatial_mean(img)
    # convert to 0 - 255
    img = get_uint8_image(img, vmin, vmax, pmin, pmax)
    # create Nansat with one band only
    nout = Nansat.from_domain(n, img, parameters={'name': bandName})
    nout.set_metadata(n.get_metadata())
    # improve geolocation accuracy
    if len(nout.vrt.dataset.GetGCPs()) > 0:
        nout.reproject_gcps()
        nout.vrt.tps = True
    return nout
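# A hedged usage sketch for get_n(): the file name is hypothetical, and a
# Nansat-readable SAR product with a sigma0_HV band is assumed. Downsamples by
# a factor of 2, converts to dB, masks invalid pixels, and percentile-stretches
# the result to UInt8.
n8 = get_n('S1A_EW_GRDM_example.zip', bandName='sigma0_HV', factor=0.5,
           dB=True, mask_invalid=True, pmin=10, pmax=99)
img8 = n8[1]  # uint8 array of the single band, ready for quicklooks or texture analysis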
def test_get_metadata_key(self):
    n1 = Nansat(self.test_file_stere, log_level=40, mapper=self.default_mapper)
    m = n1.get_metadata('filename')

    self.assertEqual(type(m), str)
def test_get_metadata_wrong_key(self):
    n1 = Nansat(self.test_file_stere, log_level=40, mapper=self.default_mapper)

    with self.assertRaises(ValueError):
        n1.get_metadata('some_crap')
def test_get_metadata_key(self):
    n1 = Nansat(self.test_file_stere, logLevel=40)
    m = n1.get_metadata('fileName')

    self.assertEqual(type(m), str)
def test_get_metadata_wrong_key(self):
    n1 = Nansat(self.test_file_stere, logLevel=40)
    m = n1.get_metadata('some_crap')

    self.assertTrue(m is None)
def test_get_metadata(self):
    n1 = Nansat(self.test_file_stere, logLevel=40)
    m = n1.get_metadata()

    self.assertEqual(type(m), dict)
    self.assertTrue('fileName' in m)
def test_get_metadata_band_id(self):
    n1 = Nansat(self.test_file_stere, log_level=40, mapper=self.default_mapper)
    m = n1.get_metadata(band_id=1)

    self.assertEqual(type(m), dict)
    self.assertTrue('name' in m)
def test_set_metadata_band_id(self):
    n1 = Nansat(self.test_file_stere, log_level=40, mapper=self.default_mapper)
    n1.set_metadata('newKey', 'newVal', band_id=1)
    m = n1.get_metadata('newKey', 1)

    self.assertEqual(m, 'newVal')
def _get_normalized_attributes(self, dataset_info, *args, **kwargs):
    """Gets dataset attributes using nansat"""
    normalized_attributes = {}
    n_points = int(kwargs.get('n_points', 10))
    nansat_options = kwargs.get('nansat_options', {})
    url_scheme = urlparse(dataset_info).scheme
    if 'http' not in url_scheme and 'ftp' not in url_scheme:
        normalized_attributes['geospaas_service_name'] = FILE_SERVICE_NAME
        normalized_attributes['geospaas_service'] = LOCAL_FILE_SERVICE
    elif 'http' in url_scheme and 'ftp' not in url_scheme:
        normalized_attributes['geospaas_service_name'] = DAP_SERVICE_NAME
        normalized_attributes['geospaas_service'] = OPENDAP_SERVICE
    elif 'ftp' in url_scheme:
        raise ValueError(
            f"Can't ingest '{dataset_info}': nansat can't open remote ftp files")

    # Open file with Nansat
    nansat_object = Nansat(nansat_filename(dataset_info),
                           log_level=self.LOGGER.getEffectiveLevel(),
                           **nansat_options)

    # get metadata from Nansat and get objects from vocabularies
    n_metadata = nansat_object.get_metadata()

    # set compulsory metadata (source)
    normalized_attributes['entry_title'] = n_metadata.get('entry_title', 'NONE')
    normalized_attributes['summary'] = n_metadata.get('summary', 'NONE')
    normalized_attributes['time_coverage_start'] = dateutil.parser.parse(
        n_metadata['time_coverage_start']).replace(tzinfo=tzutc())
    normalized_attributes['time_coverage_end'] = dateutil.parser.parse(
        n_metadata['time_coverage_end']).replace(tzinfo=tzutc())
    normalized_attributes['platform'] = json.loads(n_metadata['platform'])
    normalized_attributes['instrument'] = json.loads(n_metadata['instrument'])
    normalized_attributes['specs'] = n_metadata.get('specs', '')
    normalized_attributes['entry_id'] = n_metadata.get(
        'entry_id', 'NERSC_' + str(uuid.uuid4()))

    # set optional ForeignKey metadata from Nansat or from defaults
    normalized_attributes['gcmd_location'] = n_metadata.get(
        'gcmd_location', pti.get_gcmd_location('SEA SURFACE'))
    normalized_attributes['provider'] = pti.get_gcmd_provider(
        n_metadata.get('provider', 'NERSC'))
    normalized_attributes['iso_topic_category'] = n_metadata.get(
        'ISO_topic_category', pti.get_iso19115_topic_category('Oceans'))

    # Find coverage to set number of points in the geolocation
    if nansat_object.vrt.dataset.GetGCPs():
        nansat_object.reproject_gcps()
    normalized_attributes['location_geometry'] = GEOSGeometry(
        nansat_object.get_border_wkt(n_points=n_points), srid=4326)

    json_dumped_dataset_parameters = n_metadata.get('dataset_parameters', None)
    if json_dumped_dataset_parameters:
        json_loads_result = json.loads(json_dumped_dataset_parameters)
        if isinstance(json_loads_result, list):
            normalized_attributes['dataset_parameters'] = [
                get_cf_or_wkv_standard_name(dataset_param)
                for dataset_param in json_loads_result
            ]
        else:
            raise TypeError(
                f"Can't ingest '{dataset_info}': the 'dataset_parameters' section of the "
                "metadata returned by nansat is not a JSON list")
    else:
        normalized_attributes['dataset_parameters'] = []

    return normalized_attributes
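# A hedged usage sketch: the owning class is not shown in this file, so
# 'NansatNormalizer' and the file path are hypothetical; the keyword arguments
# mirror the kwargs read at the top of the method.
normalizer = NansatNormalizer()
attrs = normalizer._get_normalized_attributes('/data/sar/scene.nc',
                                              n_points=10, nansat_options={})
print(attrs['entry_id'], attrs['time_coverage_start'])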
CT = ft.GetFieldAsInteger('CT')
if S == 98 and F == 10:
    CC = 98 + 10  # color code for iceberg
else:
    CC = 2  # color code for bergy water
if CT == 1:
    CC = 1  # color code for open water
if CT == 98:
    CC = 0  # color code for ice free
ft.SetField('classID', int(CC))
oLayer.SetFeature(ft)

sigma0 = gdal.Open(ifile).ReadAsArray()
fp_classID = gdal.Open(ifile)
gdal.RasterizeLayer(fp_classID, [1], oLayer, options=["ATTRIBUTE=classID"])
classID = fp_classID.ReadAsArray()
classID[classID == sigma0] = 255
classID[np.isnan(classID)] = 99
nansatObjSigma0 = Nansat(ifile)
nansatObjIceChart = Nansat.from_domain(array=classID.astype(np.uint8),
                                       domain=nansatObjSigma0)
nansatObjIceChart.set_metadata(nansatObjSigma0.get_metadata())
nansatObjIceChart.set_metadata('entry_title',
                               'REPROJECTED_%s_ICE_CHART' % cfg.sourceType)
nansatObjIceChart = add_colortable(nansatObjIceChart, cfg.sourceType)
nansatObjIceChart.export(ofile, bands=[1], driver='GTiff')
if cfg.quicklook:
    rgb = np.zeros((classID.shape[0], classID.shape[1], 3), 'uint8')
    for k in colorDict[cfg.sourceType].keys():
        rgb[classID == k, :] = colorDict[cfg.sourceType][k]
    plt.imsave(ofile.replace('.tif', '.png'), rgb)
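# A hedged sketch of what add_colortable() might do (its definition is not
# shown here): attach a GDAL color table that maps classID values to the RGB
# entries of colorDict, so GeoTIFF viewers render the classes in color.
# Function name and structure are assumptions, not the actual implementation.
from osgeo import gdal

def add_colortable_sketch(nansat_obj, source_type):
    ct = gdal.ColorTable()
    for class_id, rgb in colorDict[source_type].items():  # colorDict as above
        ct.SetColorEntry(int(class_id), tuple(rgb) + (255,))  # RGBA entry
    nansat_obj.vrt.dataset.GetRasterBand(1).SetRasterColorTable(ct)
    return nansat_obj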
def save_ice_map(inp_filename, raw_filename, classifier_filename, threads,
                 source, quicklook=False, force=False):
    """ Load texture features, apply classifier and save ice map """
    # get filenames
    out_filename = inp_filename.replace('_texture_features.npz',
                                        '_classified_%s.tif' % source)
    if os.path.exists(out_filename) and not force:
        print('Processed file %s already exists.' % out_filename)
        return out_filename

    # import classifier
    plk = pickle.load(open(classifier_filename, 'rb'))
    if type(plk) == list:
        scaler, clf = plk
    else:
        class dummy_class(object):
            def transform(self, x):
                return x
        scaler = dummy_class()
        clf = plk
    clf.n_jobs = threads

    # get texture features
    npz = np.load(inp_filename)
    features = np.vstack([npz['textureFeatures'].item()['HH'],
                          npz['textureFeatures'].item()['HV'],
                          npz['incidenceAngle'][np.newaxis, :, :]])
    imgSize = features.shape[1:]
    features = features.reshape((27, np.prod(imgSize))).T
    gpi = np.isfinite(features.sum(axis=1))

    result = clf.predict(scaler.transform(features[gpi, :]))
    classImage = np.ones(np.prod(imgSize)) * 255
    classImage[gpi] = result
    classImage = classImage.reshape(imgSize)
    img_shape = classImage.shape

    # open original file to get geometry
    raw_nansat = Nansat(raw_filename)
    # crop and resize original Nansat to match the ice map
    raw_shape = raw_nansat.shape()
    crop = [rshape % ishape for (rshape, ishape) in zip(raw_shape, img_shape)]
    raw_nansat.crop(0, 0, raw_shape[1] - crop[1], raw_shape[0] - crop[0])
    raw_nansat.resize(height=img_shape[0])
    raw_nansat.reproject_gcps()

    # create new Nansat object and add ice map
    ice_map = Nansat.from_domain(domain=raw_nansat,
                                 array=classImage.astype(np.uint8))
    ice_map.set_metadata(raw_nansat.get_metadata())
    ice_map.set_metadata('entry_title', 'S1_SAR_ICE_MAP')
    ice_map = add_colortable(ice_map)
    ice_map.export(out_filename, bands=[1], driver='GTiff')

    if quicklook:
        rgb = colorcode_array(classImage)
        plt.imsave(out_filename.replace('.tif', '.png'), rgb)

    return out_filename
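# A hedged usage sketch for save_ice_map(): all file names are hypothetical.
# Assumes the npz file was produced by the texture-feature step and that the
# pickled classifier matches the 27 stacked features built above.
out = save_ice_map('scene_texture_features.npz', 'scene_raw.zip',
                   'classifier.pkl', threads=4, source='s1', quicklook=True)
print('Ice map written to', out)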
def test_get_metadata_wrong_key(self):
    n1 = Nansat(self.test_file_stere, logLevel=40)

    with self.assertRaises(OptionError):
        n1.get_metadata('some_crap')
# Open an input file, specify which Mapper to use, set logging level
n = Nansat(iFileName, mapperName='generic', logLevel=10)

# list bands and georeference of the object
print('Raw Nansat:', n, '\n')
# get dictionary with metadata from all bands
print('Bands:', n.bands(), '\n')
# get time of the image acquisition
print('Time:', n.get_time()[0], '\n')

# set GlobalMetadata
n.set_metadata(key='GlobalKey', value='GlobalVal')
# get Global Metadata
print('Global Metadata:', n.get_metadata(), '\n')

# set BandMetadata to the 1st band
n.set_metadata(key='BandKey', value='BandVal', bandID=1)
# get 1st Band Metadata
print('1st Band Metadata:', n.get_metadata(bandID=1), '\n')

# add a band from file (copy the 2nd band to the end (4th band))
n.add_band(fileName=n.fileName, bandID=2)
# add a band from numpy array (copy the 1st band to the end (5th band))
n.add_band(array=n[1], parameters={'name': 'Name1',
                                   'info': 'copy from the 1st band array'})
# print band list
n.list_bands()
# get GDAL raster band (2nd band)
band = n.get_GDALRasterBand(bandID=2)
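# A hedged continuation sketch using the same old Nansat API as above
# (oFileName is hypothetical): read the fetched GDAL band into a numpy array
# and export the object, including the added bands and metadata, to netCDF.
bandArray = band.ReadAsArray()
n.export(oFileName + '.nc')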