def get_geolocation_grids(self):
    """Read values of geolocation grids"""
    lon_dataset = gdal.Open(self.data['X_DATASET'])
    lon_grid = lon_dataset.GetRasterBand(int(self.data['X_BAND'])).ReadAsArray()
    lat_dataset = gdal.Open(self.data['Y_DATASET'])
    lat_grid = lat_dataset.GetRasterBand(int(self.data['Y_BAND'])).ReadAsArray()
    return lon_grid, lat_grid

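# A hedged usage sketch for get_geolocation_grids() above: self.data holds
# GDAL's standard 'GEOLOCATION' metadata domain, whose X_DATASET/X_BAND and
# Y_DATASET/Y_BAND keys point at the longitude and latitude arrays. The file
# and variable names here are hypothetical.
from osgeo import gdal

ds = gdal.Open('NETCDF:"example.nc":sigma0')
geoloc = ds.GetMetadata('GEOLOCATION')  # e.g. {'X_DATASET': '...', 'X_BAND': '1', ...}
lon = gdal.Open(geoloc['X_DATASET']).GetRasterBand(int(geoloc['X_BAND'])).ReadAsArray()
lat = gdal.Open(geoloc['Y_DATASET']).GetRasterBand(int(geoloc['Y_BAND'])).ReadAsArray()
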
def test_set_fake_gcps(self):
    ds = gdal.Open('NETCDF:"%s":UMass_AES' % self.test_file_arctic)
    gcps = gdal.Open(self.test_file_gcps).GetGCPs()
    vrt = VRT.copy_dataset(ds)
    dst_wkt = vrt._set_fake_gcps(self.nsr_wkt, gcps, 1)
    self.assertIsNone(dst_wkt)
    self.assertEqual(len(vrt.dataset.GetGCPs()), len(gcps))
    self.assertEqual([gcp.GCPPixel for gcp in gcps],
                     [gcp.GCPX for gcp in vrt.dataset.GetGCPs()])
    self.assertEqual([gcp.GCPLine for gcp in gcps],
                     [gcp.GCPY for gcp in vrt.dataset.GetGCPs()])

def test_create_band(self):
    array = gdal.Open(self.test_file_gcps).ReadAsArray()[1, 10:, :]
    vrt1 = VRT.from_array(array)
    vrt2 = VRT(x_size=array.shape[1], y_size=array.shape[0])
    self.assertEqual(vrt2.dataset.RasterCount, 0)
    vrt2.create_band({'SourceFilename': vrt1.filename})
    self.assertEqual(vrt2.dataset.RasterCount, 1)

def test_fix_global_metadata(self):
    ds = gdal.Open(self.test_file_gcps)
    vrt = VRT.copy_dataset(ds)
    vrt.dataset.SetMetadataItem(str('test'), str('"test"'))
    vrt.fix_global_metadata(['AREA_OR_POINT'])
    self.assertNotIn('AREA_OR_POINT', vrt.dataset.GetMetadata())
    self.assertEqual('"test"', vrt.dataset.GetMetadataItem(str('test')))

def test_set_fake_gcps_empty(self):
    ds = gdal.Open('NETCDF:"%s":UMass_AES' % self.test_file_arctic)
    vrt = VRT.copy_dataset(ds)
    dst_wkt = vrt._set_fake_gcps(self.nsr_wkt, [], 1)
    self.assertEqual(dst_wkt, self.nsr_wkt)
    self.assertEqual(len(vrt.dataset.GetGCPs()), 0)

def test_leave_few_bands(self):
    ds = gdal.Open(self.test_file_gcps)
    vrt = VRT.copy_dataset(ds)
    vrt.leave_few_bands([1, 'L_469'])
    self.assertEqual(vrt.dataset.RasterCount, 2)
    self.assertEqual(vrt.dataset.GetRasterBand(1).GetMetadataItem(str('name')), 'L_645')
    self.assertEqual(vrt.dataset.GetRasterBand(2).GetMetadataItem(str('name')), 'L_469')

def test_fix_band_metadata(self):
    ds = gdal.Open(self.test_file_gcps)
    vrt = VRT.copy_dataset(ds)
    self.assertIn('standard_name', vrt.dataset.GetRasterBand(1).GetMetadata())
    self.assertIn('time', vrt.dataset.GetRasterBand(1).GetMetadata())
    vrt.fix_band_metadata(['standard_name', 'time'])
    self.assertNotIn('standard_name', vrt.dataset.GetRasterBand(1).GetMetadata())
    self.assertNotIn('time', vrt.dataset.GetRasterBand(1).GetMetadata())

def test_set_gcps_geolocation_geotransform_with_geotransform(self):
    ds = gdal.Open('NETCDF:"%s":UMass_AES' % self.test_file_arctic)
    vrt = VRT.copy_dataset(ds)
    vrt._set_gcps_geolocation_geotransform()
    self.assertEqual(vrt.dataset.GetGeoTransform(),
                     (-1000000.0, 25000.0, 0.0, 5000000.0, 0.0, -25000.0))
    self.assertEqual(vrt.dataset.GetMetadata(str('GEOLOCATION')), {})
    self.assertEqual(vrt.dataset.GetGCPs(), ())

def test_from_array(self):
    array = gdal.Open(self.test_file_gcps).ReadAsArray()[1, 10:, :]
    vrt = VRT.from_array(array)
    self.assertEqual(vrt.dataset.RasterXSize, array.shape[1])
    self.assertEqual(vrt.dataset.RasterYSize, array.shape[0])
    self.assertEqual(vrt.dataset.RasterCount, 1)
    self.assertIn('filename', list(vrt.dataset.GetMetadata().keys()))
    self.assertEqual(gdal.Unlink(vrt.filename.replace('.vrt', '.raw')), 0)

def test_transform_points(self):
    ds = gdal.Open(self.test_file_gcps)
    vrt1 = VRT.from_gdal_dataset(ds, metadata=ds.GetMetadata())
    vrt1.tps = True
    lon, lat = vrt1.transform_points([1, 2, 3], [4, 5, 6])
    self.assertTrue(np.allclose(lon, np.array([28.23549571, 28.24337106, 28.25126129])))
    self.assertTrue(np.allclose(lat, np.array([71.52509848, 71.51913744, 71.51317568])))
    lon, lat = vrt1.transform_points([], [])
    self.assertTrue(np.allclose(lon, np.array([])))
    self.assertTrue(np.allclose(lat, np.array([])))

def test_hardcopy_bands(self):
    ds = gdal.Open(self.test_file_gcps)
    vrt = VRT.copy_dataset(ds)
    vrt.hardcopy_bands()
    self.assertTrue(np.allclose(vrt.dataset.ReadAsArray(), ds.ReadAsArray()))
    band_nodes = Node.create(str(vrt.xml)).nodeList('VRTRasterBand')
    self.assertEqual(band_nodes[0].node('SourceFilename').value,
                     vrt.band_vrts[1].filename)
    self.assertEqual(band_nodes[1].node('SourceFilename').value,
                     vrt.band_vrts[2].filename)
    self.assertEqual(band_nodes[2].node('SourceFilename').value,
                     vrt.band_vrts[3].filename)

def test_init_from_gdal_dataset(self, _add_geolocation):
    vrt = VRT()
    ds = gdal.Open(self.test_file_gcps)
    vrt._init_from_gdal_dataset(ds)
    self.assertEqual(vrt.dataset.RasterXSize, ds.RasterXSize)
    self.assertEqual(vrt.dataset.RasterYSize, ds.RasterYSize)
    self.assertEqual(vrt.dataset.GetProjection(), ds.GetProjection())
    self.assertEqual(vrt.dataset.GetGeoTransform(), ds.GetGeoTransform())
    self.assertEqual(vrt.dataset.GetGCPProjection(), ds.GetGCPProjection())
    self.assertIn('filename', list(vrt.dataset.GetMetadata().keys()))
    # assert_called_once() is the real Mock assertion; the original
    # called_once() silently returns a truthy child mock and never fails
    _add_geolocation.assert_called_once()

def _create_empty_from_projection_variable(
        self, gdal_dataset, gdal_metadata, projection_variable='projection_lambert'):
    ds = Dataset(self.input_filename)
    subdataset = gdal.Open(self._get_sub_filenames(gdal_dataset)[0])
    self._init_from_dataset_params(
        x_size=subdataset.RasterXSize,
        y_size=subdataset.RasterYSize,
        geo_transform=subdataset.GetGeoTransform(),
        projection=NSR(ds.variables[projection_variable].proj4).wkt,
        metadata=gdal_metadata)

def test_export(self):
    array = gdal.Open(self.test_file_gcps).ReadAsArray()[1, 10:, :]
    vrt = VRT.from_array(array)
    vrt.export(self.tmp_filename)
    # check that the exported file exists (asserting the filename string
    # itself is always truthy)
    self.assertTrue(os.path.exists(self.tmp_filename))
    tree = ET.parse(self.tmp_filename)
    root = tree.getroot()
    self.assertEqual(root.tag, 'VRTDataset')
    self.assertIn('rasterXSize', list(root.keys()))
    self.assertIn('rasterYSize', list(root.keys()))
    self.assertEqual([e.tag for e in root], ['Metadata', 'VRTRasterBand'])

def __init__(self, inputFileName, gdalDataset, gdalMetadata, logLevel=30, **kwargs):
    # check if mapper fits
    if not gdalMetadata:
        raise WrongMapperError
    if not os.path.splitext(inputFileName)[1] == '.mnt':
        raise WrongMapperError
    try:
        mbNorthLatitude = float(gdalMetadata['NC_GLOBAL#mbNorthLatitude'])
        mbSouthLatitude = float(gdalMetadata['NC_GLOBAL#mbSouthLatitude'])
        mbEastLongitude = float(gdalMetadata['NC_GLOBAL#mbEastLongitude'])
        mbWestLongitude = float(gdalMetadata['NC_GLOBAL#mbWestLongitude'])
        mbProj4String = gdalMetadata['NC_GLOBAL#mbProj4String']
        Number_lines = int(gdalMetadata['NC_GLOBAL#Number_lines'])
        Number_columns = int(gdalMetadata['NC_GLOBAL#Number_columns'])
        Element_x_size = float(gdalMetadata['NC_GLOBAL#Element_x_size'])
        Element_y_size = float(gdalMetadata['NC_GLOBAL#Element_y_size'])
    except (KeyError, ValueError):
        raise WrongMapperError

    # find subdataset with DEPTH
    subDatasets = gdalDataset.GetSubDatasets()
    dSourceFile = None
    for subDataset in subDatasets:
        if subDataset[0].endswith('.mnt":DEPTH'):
            dSourceFile = subDataset[0]
    if dSourceFile is None:
        raise WrongMapperError

    dSubDataset = gdal.Open(dSourceFile)
    dMetadata = dSubDataset.GetMetadata()
    try:
        scale_factor = dMetadata['DEPTH#scale_factor']
        add_offset = dMetadata['DEPTH#add_offset']
    except KeyError:
        raise WrongMapperError

    geoTransform = [mbWestLongitude, Element_x_size, 0,
                    mbNorthLatitude, 0, -Element_y_size]

    # create empty VRT dataset with geolocation only
    self._init_from_dataset_params(Number_columns, Number_lines, geoTransform,
                                   NSR(mbProj4String).wkt, metadata=gdalMetadata)

    metaDict = [{'src': {'SourceFilename': dSourceFile,
                         'SourceBand': 1,
                         'ScaleRatio': scale_factor,
                         'ScaleOffset': add_offset},
                 'dst': {'wkv': 'depth'}}]

    # add bands with metadata and corresponding values to the empty VRT
    self.create_bands(metaDict)

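# For reference, the geotransform built above follows GDAL's standard affine
# convention, which is why the y-resolution is negated for a north-up grid.
# A minimal sketch (the helper name pixel_to_geo is hypothetical):
def pixel_to_geo(gt, pixel, line):
    """Apply a GDAL geotransform: gt[0]/gt[3] are the top-left corner,
    gt[1]/gt[5] the pixel sizes (gt[5] negative for north-up grids)."""
    x_geo = gt[0] + pixel * gt[1] + line * gt[2]
    y_geo = gt[3] + pixel * gt[4] + line * gt[5]
    return x_geo, y_geo
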
def test_export2thredds_arctic_long_lat(self):
    n = Nansat(self.test_file_arctic, mapper=self.default_mapper, log_level=40)
    tmpfilename = os.path.join(self.tmp_data_path,
                               'nansat_export2thredds_arctic.nc')
    bands = {
        'Bristol': {'type': '>i2'},
        'Bootstrap': {'type': '>i2'},
        'UMass_AES': {'type': '>i2'},
    }
    n.export2thredds(tmpfilename, bands, time=datetime.datetime(2016, 1, 20))
    self.assertTrue(os.path.exists(tmpfilename))
    g = gdal.Open(tmpfilename)
    metadata = g.GetMetadata_Dict()

    # GDAL behaves differently across platforms:
    # Windows: nc-attributes are accessible without the 'NC_GLOBAL#' prefix
    # Linux: nc-attributes are accessible only with the 'NC_GLOBAL#' prefix
    # OSX: unverified
    # Therefore we try the 'NC_GLOBAL#' prefix first and fall back to no prefix
    nc_prefix = 'NC_GLOBAL#'
    if nc_prefix + 'easternmost_longitude' not in metadata:
        nc_prefix = ''
    self.assertIn(nc_prefix + 'easternmost_longitude', metadata)

    # Test that the long/lat values are set correctly
    test_metadata_keys = ['easternmost_longitude', 'westernmost_longitude',
                          'northernmost_latitude', 'southernmost_latitude']
    test_metadata_min = [179, -180, 89.9, 53]
    test_metadata_max = [180, -179, 90, 54]
    for i, test_metadata_key in enumerate(test_metadata_keys):
        metadata_value = float(metadata[nc_prefix + test_metadata_key])
        self.assertTrue(metadata_value >= test_metadata_min[i],
                        '%s is wrong: %f' % (test_metadata_key, metadata_value))
        self.assertTrue(metadata_value <= test_metadata_max[i],
                        '%s is wrong: %f' % (test_metadata_key, metadata_value))

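# A hedged sketch of a reusable lookup that tolerates both metadata-key
# conventions noted above; _nc_attr is a hypothetical helper, not part of
# Nansat's API.
def _nc_attr(metadata, key, prefix='NC_GLOBAL#'):
    """Return a netCDF global attribute with or without the NC_GLOBAL# prefix."""
    if prefix + key in metadata:
        return metadata[prefix + key]
    return metadata[key]  # raises KeyError if the attribute is truly absent
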
def test_from_dataset_params(self):
    ds = gdal.Open(self.test_file_gcps)
    vrt = VRT.from_dataset_params(ds.RasterXSize, ds.RasterYSize,
                                  ds.GetGeoTransform(), ds.GetProjection(),
                                  ds.GetGCPs(), ds.GetGCPProjection())
    self.assertEqual(vrt.dataset.RasterXSize, ds.RasterXSize)
    self.assertEqual(vrt.dataset.RasterYSize, ds.RasterYSize)
    self.assertEqual(vrt.dataset.GetProjection(), ds.GetProjection())
    self.assertEqual(vrt.dataset.GetGeoTransform(), ds.GetGeoTransform())
    self.assertEqual(vrt.dataset.GetGCPProjection(), ds.GetGCPProjection())
    self.assertIn('filename', list(vrt.dataset.GetMetadata().keys()))

def _geoloc_band_number(self, gdal_dataset, sub_filename_index, long_name):
    """Return the band number associated with a specific geolocation
    sub-file and long_name"""
    band_index = [ii for ii, ll in enumerate(self._get_sub_filenames(gdal_dataset))
                  if ':' + sub_filename_index in ll]
    if band_index:
        band_num = band_index[0] + 1
    else:
        raise WrongMapperError
    # Check that the band has the expected long_name
    # (e.g., that it actually contains latitudes)
    band = gdal.Open(self._get_sub_filenames(gdal_dataset)[band_index[0]])
    if not band.GetRasterBand(1).GetMetadata()['long_name'] == long_name:
        raise ValueError('Cannot find %s band' % long_name)
    return band_num

def test_update_warped_vrt_xml(self):
    dataset = gdal.Open('NETCDF:"%s":UMass_AES' % self.test_file_arctic)
    warped_dataset = gdal.AutoCreateWarpedVRT(dataset, None, str(self.nsr_wkt), 0)
    warped_vrt = VRT.copy_dataset(warped_dataset)
    x_size = 100
    y_size = 200
    geo_transform = (0.0, 1.0, 0.0, 200.0, 0.0, -1.0)
    block_size = 64
    working_data_type = 'Float32'
    warped_vrt._update_warped_vrt_xml(x_size, y_size, geo_transform, block_size,
                                      working_data_type)
    self.assertEqual(warped_vrt.dataset.RasterXSize, x_size)
    self.assertEqual(warped_vrt.dataset.RasterYSize, y_size)
    self.assertEqual(warped_vrt.dataset.GetGeoTransform(), geo_transform)
    self.assertEqual(warped_vrt.dataset.GetRasterBand(1).GetBlockSize(),
                     [block_size, block_size])
    self.assertIn('<WorkingDataType>Float32</WorkingDataType>', warped_vrt.xml)

def _create_empty_from_subdatasets(self, gdal_dataset, metadata):
    """Create empty vrt dataset with projection but no bands"""
    no_projection = True
    # Check if the main gdal dataset has projection
    if gdal_dataset.GetProjection():
        sub = gdal_dataset
        no_projection = False
    else:
        # Loop over subdatasets and check for projection
        for fn in self._get_sub_filenames(gdal_dataset):
            sub = gdal.Open(fn)
            if sub.GetProjection():
                no_projection = False
                break
    if no_projection:
        raise NansatMissingProjectionError
    # Initialise VRT with the (sub)dataset containing a projection
    self._init_from_gdal_dataset(sub, metadata=metadata)

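# For reference, GetSubDatasets() returns (name, description) tuples; a hedged
# sketch of what the loop above iterates over for a netCDF file ('example.nc'
# and the variable name are hypothetical):
from osgeo import gdal

parent = gdal.Open('example.nc')
for name, description in parent.GetSubDatasets():
    # name is an openable GDAL path, e.g. 'NETCDF:"example.nc":sea_ice_conc';
    # description is human-readable, e.g. '[500x400] sea_ice_conc (16-bit integer)'
    sub = gdal.Open(name)
    print(name, bool(sub.GetProjection()))
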
def test_make_source_bands_xml(self):
    array = gdal.Open(self.test_file_gcps).ReadAsArray()[1, 10:, :]
    vrt1 = VRT.from_array(array)
    src1 = {'SourceFilename': vrt1.filename}
    src2 = VRT._make_source_bands_xml(src1)
    self.assertIn('XML', src2)
    self.assertEqual(src2['SourceFilename'], vrt1.filename)
    self.assertEqual(src2['SourceBand'], 1)
    self.assertEqual(src2['LUT'], '')
    self.assertEqual(src2['NODATA'], '')
    self.assertEqual(src2['SourceType'], 'ComplexSource')
    self.assertEqual(src2['ScaleRatio'], 1.0)
    self.assertEqual(src2['ScaleOffset'], 0.0)
    self.assertEqual(src2['DataType'], 1)
    self.assertEqual(src2['xSize'], 200)
    self.assertEqual(src2['ySize'], 190)
    with self.assertRaises(KeyError):
        src2 = VRT._make_source_bands_xml({})

def test_buggy_var(self, mock_init):
    """The last band dimensions should be latitude and longitude - otherwise
    gdal will fail to read the data correctly. This test confirms that this
    understanding is correct.

    The shape of buggy_var is ('time', 'latitude', 'longitude', 'pressure')
    """
    mock_init.return_value = None
    mm = Mapper()
    mm.input_filename = self.tmp_filename
    fn = 'NETCDF:"' + self.tmp_filename + '":buggy_var'
    bdict = mm._get_band_from_subfile(fn, bands=['x_wind'])
    self.assertEqual(bdict['src']['SourceBand'], 1)
    self.assertEqual(bdict['dst']['NETCDF_DIM_latitude'], '0')
    self.assertEqual(bdict['dst']['time_iso_8601'],
                     np.datetime64('2019-06-15T15:00:00.000000'))
    subds = gdal.Open(fn)
    self.assertEqual(subds.RasterXSize, 7)   # size of pressure dimension
    self.assertEqual(subds.RasterYSize, 20)  # size of longitude dimension

def _create_empty_with_nansat_spatial_reference_WKT(self, gdal_dataset, gdal_metadata):
    """In this case, gdal cannot find the projection of any subdatasets. We
    therefore assume a regular longitude/latitude grid, and set the
    projection to the Nansat Spatial Reference WKT [NSR().wkt], using the
    first subdataset as source.
    """
    # NB: if gdal_dataset has subdatasets, gdal_dataset.RasterXSize equals 512,
    # gdal_dataset.RasterYSize equals 512, and gdal_dataset.GetGeoTransform()
    # returns (1,0,0,0,1,0). This is wrong.
    fn = self._get_sub_filenames(gdal_dataset)
    if len(fn) == 0:
        subdataset = gdal_dataset
    else:
        subdataset = gdal.Open(fn[0])
    self._init_from_dataset_params(
        x_size=subdataset.RasterXSize,
        y_size=subdataset.RasterYSize,
        geo_transform=subdataset.GetGeoTransform(),
        projection=NSR().wkt,
        metadata=gdal_metadata)

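# A hedged sketch illustrating the NB above: when a file only carries data in
# subdatasets, the parent dataset's size and geotransform are GDAL
# placeholders and should not be trusted; the subdataset carries the real
# grid ('example.nc' is hypothetical).
from osgeo import gdal

parent = gdal.Open('example.nc')
subs = parent.GetSubDatasets()
if subs:
    sub = gdal.Open(subs[0][0])
    print(parent.RasterXSize, parent.RasterYSize, parent.GetGeoTransform())  # defaults
    print(sub.RasterXSize, sub.RasterYSize, sub.GetGeoTransform())           # real grid
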
def __init__(self, filename, gdalDataset, gdalMetadata, **kwargs):
    """Create VRT"""
    try:
        ice_folder_name = kwargs['iceFolder']
    except KeyError:
        # ice_folder_name = '/vol/istjenesten/data/metnoCharts/'
        ice_folder_name = '/vol/data/metnoCharts/'

    keyword_base = 'metno_local_hires_seaice'
    if filename[0:len(keyword_base)] != keyword_base:
        raise WrongMapperError

    keyword_time = filename[len(keyword_base) + 1:]
    requested_time = datetime.strptime(keyword_time, '%Y%m%d')
    # Search for nearest available file, within the closest 3 days
    found_dataset = False
    for delta_day in [0, -1, 1, -2, 2, -3, 3]:
        valid_time = (requested_time + timedelta(days=delta_day) +
                      timedelta(hours=15))
        filename = (ice_folder_name + 'ice_conc_svalbard_' +
                    valid_time.strftime('%Y%m%d1500.nc'))
        if os.path.exists(filename):
            print('Found file:')
            print(filename)
            gdal_dataset = gdal.Open(filename)
            # read the metadata from the newly opened dataset
            gdal_metadata = gdal_dataset.GetMetadata()
            mg.Mapper.__init__(self, filename, gdal_dataset, gdal_metadata)
            found_dataset = True
            # Modify GeoTransform from netCDF file
            # - otherwise a shift is seen!
            self.dataset.SetGeoTransform(
                (-1243508 - 1000, 1000, 0, -210526 - 7000, 0, -1000))
            break  # data is found for this day

    if found_dataset is False:
        # the exception must actually be raised to abort the mapper
        raise AttributeError("No local Svalbard-ice files available")

def __init__(self, filename, gdalDataset, gdalMetadata, resolution='low', **kwargs):
    '''Create LANDSAT VRT from multiple tif files or single tar.gz file'''
    mtlFileName = ''
    bandFileNames = []
    bandSizes = []
    bandDatasets = []
    fname = os.path.split(filename)[1]

    if (filename.endswith('.tar') or filename.endswith('.tar.gz') or
            filename.endswith('.tgz')):
        # try to open .tar or .tar.gz or .tgz file with tar
        try:
            tarFile = tarfile.open(filename)
        except (tarfile.ReadError, IOError):
            raise WrongMapperError

        # collect names of bands and corresponding sizes
        # into bandsInfo dict and bandSizes list
        tarNames = sorted(tarFile.getnames())
        for tarName in tarNames:
            # check if TIF files inside TAR qualify
            if (tarName[0] in ['L', 'M'] and
                    os.path.splitext(tarName)[1] in ['.TIF', '.tif']):
                # open TIF file from TAR using VSI
                sourceFilename = '/vsitar/%s/%s' % (filename, tarName)
                gdalDatasetTmp = gdal.Open(sourceFilename)
                # keep name, GDALDataset and size
                bandFileNames.append(sourceFilename)
                bandSizes.append(gdalDatasetTmp.RasterXSize)
                bandDatasets.append(gdalDatasetTmp)
            elif tarName.endswith('MTL.txt') or tarName.endswith('MTL.TXT'):
                # get mtl file
                mtlFileName = tarName

    elif ((fname.startswith('L') or fname.startswith('M')) and
          (fname.endswith('.tif') or fname.endswith('.TIF') or
           fname.endswith('._MTL.txt'))):
        # try to find TIF/tif files with the same name as input file
        path, coreName = os.path.split(filename)
        coreName = os.path.splitext(coreName)[0].split('_')[0]
        coreNameMask = coreName + '*[tT][iI][fF]'
        tifNames = sorted(glob.glob(os.path.join(path, coreNameMask)))
        for tifName in tifNames:
            sourceFilename = tifName
            gdalDatasetTmp = gdal.Open(sourceFilename)
            # keep name, GDALDataset and size
            bandFileNames.append(sourceFilename)
            bandSizes.append(gdalDatasetTmp.RasterXSize)
            bandDatasets.append(gdalDatasetTmp)
        # get mtl file
        mtlFiles = glob.glob(coreName + '*[mM][tT][lL].[tT][xX][tT]')
        if len(mtlFiles) > 0:
            mtlFileName = mtlFiles[0]
    else:
        raise WrongMapperError

    # if no TIF files were found - not an appropriate mapper
    if not bandFileNames:
        raise WrongMapperError

    # get appropriate band size based on number of unique sizes and
    # required resolution
    if resolution == 'low':
        bandXSize = min(bandSizes)
    elif resolution in ['high', 'hi']:
        bandXSize = max(bandSizes)
    else:
        raise ValueError('Wrong resolution %s for file %s' % (resolution, filename))

    # find bands with appropriate size and put to metaDict
    metaDict = []
    for bandFileName, bandSize, bandDataset in zip(bandFileNames, bandSizes,
                                                   bandDatasets):
        if bandSize == bandXSize:
            # let last part of file name be suffix
            bandSuffix = os.path.splitext(bandFileName)[0].split('_')[-1]
            metaDict.append({
                'src': {'SourceFilename': bandFileName,
                        'SourceBand': 1,
                        'ScaleRatio': 0.1},
                'dst': {'wkv': 'toa_outgoing_spectral_radiance',
                        'suffix': bandSuffix}})
            gdalDataset4Use = bandDataset

    # create empty VRT dataset with geolocation only
    self._init_from_gdal_dataset(gdalDataset4Use)

    # add bands with metadata and corresponding values to the empty VRT
    self.create_bands(metaDict)

    if len(mtlFileName) > 0:
        mtlFileName = os.path.join(os.path.split(bandFileNames[0])[0],
                                   mtlFileName)
        mtlFileLines = [line.strip()
                        for line in self.read_vsi(mtlFileName).split('\n')]
        dateString = [line.split('=')[1].strip()
                      for line in mtlFileLines
                      if ('DATE_ACQUIRED' in line or
                          'ACQUISITION_DATE' in line)][0]
        timeStr = [line.split('=')[1].strip()
                   for line in mtlFileLines
                   if ('SCENE_CENTER_TIME' in line or
                       'SCENE_CENTER_SCAN_TIME' in line)][0]
        time_start = parse_time(dateString + 'T' + timeStr).isoformat()
        time_end = (parse_time(dateString + 'T' + timeStr) +
                    datetime.timedelta(microseconds=60000000)).isoformat()
        self.dataset.SetMetadataItem('time_coverage_start', time_start)
        self.dataset.SetMetadataItem('time_coverage_end', time_end)

    # set platform
    platform = 'LANDSAT'
    if fname[2].isdigit():
        platform += '-' + fname[2]
    ee = pti.get_gcmd_platform(platform)
    self.dataset.SetMetadataItem('platform', json.dumps(ee))

    # set instrument
    instrument = {
        'LANDSAT': 'MSS',
        'LANDSAT-1': 'MSS',
        'LANDSAT-2': 'MSS',
        'LANDSAT-3': 'MSS',
        'LANDSAT-4': 'TM',
        'LANDSAT-5': 'TM',
        'LANDSAT-7': 'ETM+',
        'LANDSAT-8': 'OLI'}[platform]
    ee = pti.get_gcmd_instrument(instrument)
    self.dataset.SetMetadataItem('instrument', json.dumps(ee))

def __init__(self, filename, gdalDataset, gdalMetadata,
             GCP_STEP=20, MAX_LAT=90, MIN_LAT=50, resolution='low', **kwargs):
    '''Create VRT

    Parameters
    ----------
    GCP_STEP : int
        step between GCPs along each dimension
    '''
    ifile = os.path.split(filename)[1]
    if not ifile.startswith('GW1AM2_') or not ifile.endswith('.h5'):
        raise WrongMapperError
    try:
        ProductName = gdalMetadata['ProductName']
        PlatformShortName = gdalMetadata['PlatformShortName']
        SensorShortName = gdalMetadata['SensorShortName']
    except KeyError:
        raise WrongMapperError

    if (not ProductName == 'AMSR2-L1R' or
            not PlatformShortName == 'GCOM-W1' or
            not SensorShortName == 'AMSR2'):
        raise WrongMapperError

    if resolution == 'low':
        subDatasetWidth = 243
    else:
        subDatasetWidth = 486

    # get GCPs from lon/lat grids
    latGrid = gdal.Open('HDF5:"%s"://Latitude_of_Observation_Point_for_89A'
                        % filename).ReadAsArray()
    lonGrid = gdal.Open('HDF5:"%s"://Longitude_of_Observation_Point_for_89A'
                        % filename).ReadAsArray()
    if subDatasetWidth == 243:
        latGrid = latGrid[:, ::2]
        lonGrid = lonGrid[:, ::2]

    dx = .5
    dy = .5
    gcps = []
    k = 0
    maxY = 0
    minY = latGrid.shape[0]
    for i0 in range(0, latGrid.shape[0], GCP_STEP):
        for i1 in range(0, latGrid.shape[1], GCP_STEP):
            # create GCP with X,Y,pixel,line from lat/lon matrices
            lon = float(lonGrid[i0, i1])
            lat = float(latGrid[i0, i1])
            if (lon >= -180 and lon <= 180 and
                    lat >= MIN_LAT and lat <= MAX_LAT):
                gcp = gdal.GCP(lon, lat, 0, i1 + dx, i0 + dy)
                gcps.append(gcp)
                k += 1
                maxY = max(maxY, i0)
                minY = min(minY, i0)
    yOff = minY
    ySize = maxY - minY

    # remove Y-offset from gcps
    for gcp in gcps:
        gcp.GCPLine -= yOff

    metaDict = []
    subDatasets = gdalDataset.GetSubDatasets()
    metadata = gdalDataset.GetMetadata()
    for subDataset in subDatasets:
        # select subdatasets for that resolution (width)
        if (subDatasetWidth == int(subDataset[1].split(']')[0].split('x')[-1]) and
                'Latitude' not in subDataset[0] and
                'Longitude' not in subDataset[0]):
            name = subDataset[0].split('/')[-1]
            # find scale
            scale = 1
            for meta in metadata:
                if name + '_SCALE' in meta:
                    scale = float(metadata[meta])
            # create meta entry
            metaEntry = {'src': {'SourceFilename': subDataset[0],
                                 'SourceBand': 1,
                                 'ScaleRatio': scale,
                                 'ScaleOffset': 0,
                                 'yOff': yOff,
                                 'ySize': ySize},
                         'dst': {'name': name}}
            metaDict.append(metaEntry)

    # create VRT from one of the subdatasets
    gdalSubDataset = gdal.Open(metaEntry['src']['SourceFilename'])
    self._init_from_dataset_params(subDatasetWidth, ySize,
                                   (1, 0, 0, ySize, 0, -1), NSR().wkt)
    # add bands with metadata and corresponding values to the empty VRT
    self.create_bands(metaDict)

    self.dataset.SetMetadataItem(
        'time_coverage_start',
        parse_time(gdalMetadata['ObservationStartDateTime']).isoformat())
    self.dataset.SetMetadataItem(
        'time_coverage_end',
        parse_time(gdalMetadata['ObservationEndDateTime']).isoformat())

    # append GCPs and lat/lon projection to the vsiDataset
    self.dataset.SetGCPs(gcps, NSR().wkt)
    self.reproject_gcps(
        '+proj=stere +datum=WGS84 +ellps=WGS84 +lat_0=90 +lon_0=0 +no_defs')
    self.tps = True

    mm = pti.get_gcmd_instrument('AMSR2')
    ee = pti.get_gcmd_platform('GCOM-W1')
    self.dataset.SetMetadataItem('instrument', json.dumps(mm))
    self.dataset.SetMetadataItem('platform', json.dumps(ee))

def _get_band_from_subfile(self, fn, netcdf_dim={}, bands=[]):
    nc_ds = Dataset(self.input_filename)
    band_name = fn.split(':')[-1]
    if bands:
        variable = nc_ds.variables[band_name]
        if ('standard_name' not in variable.ncattrs() or
                variable.standard_name not in bands):
            raise ContinueI
    # TODO: consider allowing the band name in addition to, or instead of,
    # standard_name in the bands kwarg

    sub_band = nc_ds.variables[band_name]
    dimension_names = [b.name for b in sub_band.get_dims()]
    dimension_names.reverse()
    dim_sizes = {}
    for dim in sub_band.get_dims():
        dim_sizes[dim.name] = dim.size

    # Pop spatial dimensions (longitude and latitude, or x and y)
    for allowed in ALLOWED_SPATIAL_DIMENSIONS_X:
        try:
            ind_dim_x = [i for i, s in enumerate(dimension_names)
                         if allowed in s.lower()][0]
            dimension_names.pop(ind_dim_x)
        except IndexError:
            continue
    for allowed in ALLOWED_SPATIAL_DIMENSIONS_Y:
        try:
            ind_dim_y = [i for i, s in enumerate(dimension_names)
                         if allowed in s.lower()][0]
            dimension_names.pop(ind_dim_y)
        except IndexError:
            continue

    index4key = collections.OrderedDict()
    for key in dimension_names:
        if key in netcdf_dim.keys():
            val = netcdf_dim[key]
            if key == 'time' and type(val) == np.datetime64:
                # Get band number from given timestamp
                index = int(np.argmin(np.abs(self.times() - val)))
            else:
                index = int(np.argmin(np.abs(nc_ds.variables[key][:] - val)))
            index4key[key] = {'index': index, 'size': dim_sizes[key]}
        else:
            index4key[key] = {'index': 0, 'size': dim_sizes[key]}

    # A mutable class attribute is used instead of 'nonlocal', so this works
    # in both Python 2 and 3
    class Context:
        band_number = 1
        multiplier = 1

    def get_band_number():
        try:
            name_dim0 = dimension_names.pop(0)
        except IndexError:
            return
        Context.band_number += index4key[name_dim0]['index'] * Context.multiplier
        Context.multiplier *= index4key[name_dim0]['size']
        # recurse until all non-spatial dimensions are consumed
        get_band_number()

    get_band_number()
    subds = gdal.Open(fn)
    band = subds.GetRasterBand(Context.band_number)
    band_metadata = self._clean_band_metadata(band)

    return self._band_dict(fn, Context.band_number, subds,
                           band=band, band_metadata=band_metadata)

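# A hedged worked example of the band-number arithmetic above: GDAL flattens
# the non-spatial dimensions into consecutive bands, so the band number is
# 1 + sum(index_i * multiplier_i), where each multiplier is the product of the
# sizes of the faster-varying dimensions. The dimension names and sizes below
# are hypothetical.
index4key = {
    'depth': {'index': 1, 'size': 3},  # innermost dimension: multiplier 1
    'time': {'index': 2, 'size': 4},   # next dimension: multiplier 3 (size of depth)
}
band_number = 1
multiplier = 1
for name in ['depth', 'time']:  # fastest-varying first
    band_number += index4key[name]['index'] * multiplier
    multiplier *= index4key[name]['size']
print(band_number)  # 1 + 1*1 + 2*3 = 8
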
def __init__(self, inputFileName, gdalDataset, gdalMetadata, xmlonly=False, **kwargs):
    '''Create Radarsat2 VRT'''
    fPathName, fExt = os.path.splitext(inputFileName)

    if zipfile.is_zipfile(inputFileName):
        # Open zip file using VSI
        fPath, fName = os.path.split(fPathName)
        filename = '/vsizip/%s/%s' % (inputFileName, fName)
        if 'RS' not in fName[0:2]:
            raise WrongMapperError('%s: Provided data is not Radarsat-2' % fName)
        gdalDataset = gdal.Open(filename)
        gdalMetadata = gdalDataset.GetMetadata()
    else:
        filename = inputFileName

    # if it is not RADARSAT-2, return
    if (not gdalMetadata or
            'SATELLITE_IDENTIFIER' not in list(gdalMetadata.keys())):
        raise WrongMapperError(filename)
    elif gdalMetadata['SATELLITE_IDENTIFIER'] != 'RADARSAT-2':
        raise WrongMapperError(filename)

    if zipfile.is_zipfile(inputFileName):
        # Open product.xml to get additional metadata
        zz = zipfile.ZipFile(inputFileName)
        productXmlName = os.path.join(
            os.path.basename(inputFileName).split('.')[0], 'product.xml')
        productXml = zz.open(productXmlName).read()
    else:
        # Open product.xml to get additional metadata
        productXmlName = os.path.join(filename, 'product.xml')
        if not os.path.isfile(productXmlName):
            raise WrongMapperError(filename)
        productXml = open(productXmlName).read()

    if not IMPORT_SCIPY:
        raise NansatReadError('Radarsat-2 data cannot be read because scipy is not installed')

    # parse product.xml
    rs2_0 = Node.create(productXml)

    if xmlonly:
        self.init_from_xml(rs2_0, filename)
        return

    # Get additional metadata from product.xml
    rs2_1 = rs2_0.node('sourceAttributes')
    rs2_2 = rs2_1.node('radarParameters')
    if rs2_2['antennaPointing'].lower() == 'right':
        antennaPointing = 90
    else:
        antennaPointing = -90
    rs2_3 = rs2_1.node('orbitAndAttitude').node('orbitInformation')
    passDirection = rs2_3['passDirection']

    # create empty VRT dataset with geolocation only
    self._init_from_gdal_dataset(gdalDataset)
    self.dataset.SetGCPs(self.dataset.GetGCPs(), NSR().wkt)

    # define dictionary of metadata and band specific parameters
    pol = []
    metaDict = []

    # Get the subdataset with calibrated sigma0 only
    for dataset in gdalDataset.GetSubDatasets():
        if dataset[1] == 'Sigma Nought calibrated':
            s0dataset = gdal.Open(dataset[0])
            s0datasetName = dataset[0][:]
            band = s0dataset.GetRasterBand(1)
            s0datasetPol = band.GetMetadata()['POLARIMETRIC_INTERP']
            for i in range(1, s0dataset.RasterCount + 1):
                iBand = s0dataset.GetRasterBand(i)
                polString = iBand.GetMetadata()['POLARIMETRIC_INTERP']
                suffix = polString
                # The nansat data will be complex
                # if the SAR data is of type 10
                dtype = iBand.DataType
                if dtype == 10:
                    # add intensity band
                    metaDict.append(
                        {'src': {'SourceFilename': ('RADARSAT_2_CALIB:SIGMA0:' +
                                                    filename + '/product.xml'),
                                 'SourceBand': i,
                                 'DataType': dtype},
                         'dst': {'wkv': 'surface_backwards_scattering_coefficient_of_radar_wave',
                                 'PixelFunctionType': 'intensity',
                                 'SourceTransferType': gdal.GetDataTypeName(dtype),
                                 'suffix': suffix,
                                 'polarization': polString,
                                 'dataType': 6}})
                    # modify suffix for adding the complex band below
                    suffix = polString + '_complex'

                pol.append(polString)
                metaDict.append(
                    {'src': {'SourceFilename': ('RADARSAT_2_CALIB:SIGMA0:' +
                                                filename + '/product.xml'),
                             'SourceBand': i,
                             'DataType': dtype},
                     'dst': {'wkv': 'surface_backwards_scattering_coefficient_of_radar_wave',
                             'suffix': suffix,
                             'polarization': polString}})

        if dataset[1] == 'Beta Nought calibrated':
            b0dataset = gdal.Open(dataset[0])
            b0datasetName = dataset[0][:]
            for j in range(1, b0dataset.RasterCount + 1):
                jBand = b0dataset.GetRasterBand(j)
                polString = jBand.GetMetadata()['POLARIMETRIC_INTERP']
                if polString == s0datasetPol:
                    b0datasetBand = j

    ###############################
    # Add SAR look direction
    ###############################
    d = Domain(ds=gdalDataset)
    lon, lat = d.get_geolocation_grids(100)

    '''
    (GDAL?) Radarsat-2 data is stored with maximum latitude at first
    element of each column and minimum longitude at first element of each
    row (e.g. np.shape(lat)=(59,55) -> latitude maxima are at lat[0,:],
    and longitude minima are at lon[:,0])

    In addition, there is an interpolation error for direct estimate along
    azimuth. We therefore estimate the heading along range and add 90
    degrees to get the "satellite" heading.
    '''
    if str(passDirection).upper() == 'DESCENDING':
        sat_heading = initial_bearing(lon[:, :-1], lat[:, :-1],
                                      lon[:, 1:], lat[:, 1:]) + 90
    elif str(passDirection).upper() == 'ASCENDING':
        sat_heading = initial_bearing(lon[:, 1:], lat[:, 1:],
                                      lon[:, :-1], lat[:, :-1]) + 90
    else:
        print('Can not decode pass direction: ' + str(passDirection))

    # Calculate SAR look direction
    look_direction = sat_heading + antennaPointing
    # Interpolate to regain lost row
    look_direction = np.mod(look_direction, 360)
    look_direction = scipy.ndimage.interpolation.zoom(
        look_direction, (1, 11. / 10.))
    # Decompose, to avoid interpolation errors around 0 <-> 360
    look_direction_u = np.sin(np.deg2rad(look_direction))
    look_direction_v = np.cos(np.deg2rad(look_direction))
    look_u_VRT = VRT.from_array(look_direction_u)
    look_v_VRT = VRT.from_array(look_direction_v)

    # Note: If incidence angle and look direction are stored in
    # same VRT, access time is about twice as large
    lookVRT = VRT.from_lonlat(lon, lat)
    lookVRT.create_band(
        [{'SourceFilename': look_u_VRT.filename, 'SourceBand': 1},
         {'SourceFilename': look_v_VRT.filename, 'SourceBand': 1}],
        {'PixelFunctionType': 'UVToDirectionTo'})

    # Blow up to full size
    lookVRT = lookVRT.get_resized_vrt(gdalDataset.RasterXSize,
                                      gdalDataset.RasterYSize)

    # Store VRTs so that they are accessible later
    self.band_vrts['look_u_VRT'] = look_u_VRT
    self.band_vrts['look_v_VRT'] = look_v_VRT
    self.band_vrts['lookVRT'] = lookVRT

    # Add band to full sized VRT
    lookFileName = self.band_vrts['lookVRT'].filename
    metaDict.append({'src': {'SourceFilename': lookFileName,
                             'SourceBand': 1},
                     'dst': {'wkv': 'sensor_azimuth_angle',
                             'name': 'look_direction'}})

    ###############################
    # Create bands
    ###############################
    self.create_bands(metaDict)

    ###################################################
    # Add derived band (incidence angle) calculated
    # using pixel function "BetaSigmaToIncidence":
    ###################################################
    src = [{'SourceFilename': b0datasetName,
            'SourceBand': b0datasetBand,
            'DataType': dtype},
           {'SourceFilename': s0datasetName,
            'SourceBand': 1,
            'DataType': dtype}]
    dst = {'wkv': 'angle_of_incidence',
           'PixelFunctionType': 'BetaSigmaToIncidence',
           'SourceTransferType': gdal.GetDataTypeName(dtype),
           '_FillValue': -10000,  # NB: this is also hard-coded in
                                  # pixelfunctions.c
           'dataType': 6,
           'name': 'incidence_angle'}
    self.create_band(src, dst)
    self.dataset.FlushCache()

    ###################################################################
    # Add sigma0_VV - pixel function of sigma0_HH and beta0_HH
    # incidence angle is calculated within pixel function
    # It is assumed that HH is the first band in sigma0 and
    # beta0 sub datasets
    ###################################################################
    if 'VV' not in pol and 'HH' in pol:
        s0datasetNameHH = pol.index('HH') + 1
        src = [{'SourceFilename': s0datasetName,
                'SourceBand': s0datasetNameHH,
                'DataType': 6},
               {'SourceFilename': b0datasetName,
                'SourceBand': b0datasetBand,
                'DataType': 6}]
        dst = {'wkv': 'surface_backwards_scattering_coefficient_of_radar_wave',
               'PixelFunctionType': 'Sigma0HHBetaToSigma0VV',
               'polarization': 'VV',
               'suffix': 'VV'}
        self.create_band(src, dst)
        self.dataset.FlushCache()

    ############################################
    # Add SAR metadata
    ############################################
    if antennaPointing == 90:
        self.dataset.SetMetadataItem('ANTENNA_POINTING', 'RIGHT')
    if antennaPointing == -90:
        self.dataset.SetMetadataItem('ANTENNA_POINTING', 'LEFT')
    self.dataset.SetMetadataItem('ORBIT_DIRECTION', str(passDirection).upper())

    # set valid time
    self.dataset.SetMetadataItem(
        'time_coverage_start',
        parse(gdalMetadata['FIRST_LINE_TIME']).isoformat())
    self.dataset.SetMetadataItem(
        'time_coverage_end',
        parse(gdalMetadata['LAST_LINE_TIME']).isoformat())

    # Get dictionary describing the instrument and platform according to
    # the GCMD keywords
    mm = pti.get_gcmd_instrument("C-SAR")
    ee = pti.get_gcmd_platform('radarsat-2')

    # TODO: Validate that the found instrument and platform are indeed what
    # we want....

    self.dataset.SetMetadataItem('instrument', json.dumps(mm))
    self.dataset.SetMetadataItem('platform', json.dumps(ee))
    self.dataset.SetMetadataItem('entry_title', 'Radarsat-2 SAR')
    self.dataset.SetMetadataItem('provider', 'MDA/GSI')
    self.dataset.SetMetadataItem('dataset_parameters', json.dumps(
        ['surface_backwards_scattering_coefficient_of_radar_wave']))
    self.dataset.SetMetadataItem('entry_id', os.path.basename(filename))

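# A hedged numeric illustration of why the look direction is decomposed into
# sin/cos components before zooming (see 'Decompose, to avoid interpolation
# errors around 0 <-> 360' above): interpolating angles directly across the
# wrap-around gives a wildly wrong midpoint, while interpolating the unit
# vector does not.
import numpy as np

a, b = 359.0, 1.0                 # two headings just either side of north
naive_mid = (a + b) / 2           # 180.0 - points south, clearly wrong
u = (np.sin(np.deg2rad(a)) + np.sin(np.deg2rad(b))) / 2
v = (np.cos(np.deg2rad(a)) + np.cos(np.deg2rad(b))) / 2
vector_mid = np.mod(np.degrees(np.arctan2(u, v)), 360)  # 0.0 - points north
print(naive_mid, vector_mid)
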
def test_from_gdal_dataset(self, _init_from_gdal_dataset):
    ds = gdal.Open(self.test_file_gcps)
    vrt = VRT.from_gdal_dataset(ds)
    self.assertIsInstance(vrt, VRT)
    # assert_called_once() is the real Mock assertion; the original
    # called_once() silently returns a truthy child mock and never fails
    _init_from_gdal_dataset.assert_called_once()

def test_get_super_vrt(self):
    ds = gdal.Open(self.test_file_gcps)
    vrt1 = VRT.from_gdal_dataset(ds, metadata=ds.GetMetadata())
    vrt2 = vrt1.get_super_vrt()
    self.assertIsInstance(vrt2.vrt, VRT)
    self.assertEqual(vrt2.dataset.GetMetadataItem(str('AREA_OR_POINT')), 'Area')